R Markdown

library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(coefplot)
## Loading required package: ggplot2
library(gridExtra)
## 
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
## 
##     combine
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats   1.0.0     ✔ stringr   1.5.0
## ✔ lubridate 1.9.2     ✔ tibble    3.2.1
## ✔ purrr     1.0.2     ✔ tidyr     1.3.0
## ✔ readr     2.1.4
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ gridExtra::combine() masks dplyr::combine()
## ✖ dplyr::filter()      masks stats::filter()
## ✖ dplyr::lag()         masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(iterators)
library(caret)
## Loading required package: lattice
## 
## Attaching package: 'caret'
## 
## The following object is masked from 'package:purrr':
## 
##     lift
library(parallel)
library(doParallel)
## Loading required package: foreach
## 
## Attaching package: 'foreach'
## 
## The following objects are masked from 'package:purrr':
## 
##     accumulate, when
df <- readr::read_csv("paint_project_train_data.csv", col_names = TRUE)
## Rows: 835 Columns: 8
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ","
## chr (2): Lightness, Saturation
## dbl (6): R, G, B, Hue, response, outcome
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
df %>% glimpse()
## Rows: 835
## Columns: 8
## $ R          <dbl> 172, 26, 172, 28, 170, 175, 90, 194, 171, 122, 0, 88, 144, …
## $ G          <dbl> 58, 88, 94, 87, 66, 89, 78, 106, 68, 151, 121, 140, 82, 163…
## $ B          <dbl> 62, 151, 58, 152, 58, 65, 136, 53, 107, 59, 88, 58, 132, 50…
## $ Lightness  <chr> "dark", "dark", "dark", "dark", "dark", "dark", "dark", "da…
## $ Saturation <chr> "bright", "bright", "bright", "bright", "bright", "bright",…
## $ Hue        <dbl> 4, 31, 8, 32, 5, 6, 34, 10, 1, 21, 24, 22, 36, 16, 26, 12, …
## $ response   <dbl> 12, 10, 16, 10, 11, 16, 10, 19, 14, 25, 14, 19, 14, 38, 15,…
## $ outcome    <dbl> 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,…

Part i: Exploration

#1. Visualize the distributions of variables in the data set.

#1.1 Counts for categorical variables.

ans: Here we count the observations for Lightness and Saturation separately, and we also count the observations for each Lightness–Saturation pair.

# Tally the categorical inputs individually and jointly.
# count(df, ...) is equivalent to df %>% count(...).
counts_lightness <- count(df, Lightness)
counts_saturation <- count(df, Saturation)
counts_df <- count(df, Lightness, Saturation)

print(counts_lightness)
## # A tibble: 7 × 2
##   Lightness     n
##   <chr>     <int>
## 1 dark        117
## 2 deep        119
## 3 light       120
## 4 midtone     119
## 5 pale        121
## 6 saturated   119
## 7 soft        120
print(counts_saturation)
## # A tibble: 7 × 2
##   Saturation     n
##   <chr>      <int>
## 1 bright       126
## 2 gray          83
## 3 muted        126
## 4 neutral      122
## 5 pure         126
## 6 shaded       126
## 7 subdued      126
print(counts_df)
## # A tibble: 49 × 3
##    Lightness Saturation     n
##    <chr>     <chr>      <int>
##  1 dark      bright        18
##  2 dark      gray          10
##  3 dark      muted         18
##  4 dark      neutral       17
##  5 dark      pure          18
##  6 dark      shaded        18
##  7 dark      subdued       18
##  8 deep      bright        18
##  9 deep      gray          12
## 10 deep      muted         18
## # ℹ 39 more rows

#1.2 Histograms or Density plots for continuous variables. Are the distributions Gaussian like?

According to the graphs below, the distributions are not Gaussian-like.

# Density plots for each continuous variable.
# NOTE: `binwidth` is a histogram argument and is not valid for geom_density()
# (it was silently ignored with a warning); use `adjust` or `bw` to control
# the kernel bandwidth if needed.
ggplot(df, aes(x = response)) +
  geom_density(fill = "lightblue", color = "darkblue") +
  labs(title = "Density Plot of Response", x = "Response")

ggplot(df, aes(x = R)) +
  geom_density(fill = "lightblue", color = "darkblue") +
  labs(title = "Density Plot of R", x = "R")

ggplot(df, aes(x = G)) +
  geom_density(fill = "lightblue", color = "darkblue") +
  labs(title = "Density Plot of G", x = "G")

ggplot(df, aes(x = B)) +
  geom_density(fill = "lightblue", color = "darkblue") +
  labs(title = "Density Plot of B", x = "B")

ggplot(df, aes(x = Hue)) +
  geom_density(fill = "lightblue", color = "darkblue") +
  labs(title = "Density Plot of Hue", x = "Hue")

#2. Condition (group) the continuous variables based on the categorical variables.

#2.1 Are there differences in continuous variable distributions and continuous variable summary statistics based on categorical variable values?

library(reshape2)
## 
## Attaching package: 'reshape2'
## The following object is masked from 'package:tidyr':
## 
##     smiths
# Use melt() to reshape R, G, B into a single long-format value column
df_melted <- melt(df, id.vars = c("Lightness"), measure.vars = c("R", "G", "B"))

# Boxplots of each color channel grouped by Lightness
ggplot(df_melted, aes(x = Lightness, y = value, fill = variable)) +
  geom_boxplot() +
  labs(title = "Boxplot of R, G, B Grouped by Lightness") +
  theme(axis.text.x = element_text(angle = 45, hjust = 1))

# Compare the R channel across Lightness groups with a one-way ANOVA
model_anova <- aov(R ~ Lightness, data = df)
anova_result <- summary(model_anova)
print(anova_result)
##              Df  Sum Sq Mean Sq F value Pr(>F)    
## Lightness     6 1460742  243457   157.3 <2e-16 ***
## Residuals   828 1281177    1547                   
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

In these results, the p-value Pr(&gt;F) is less than 0.05 (a common significance level), so we reject the null hypothesis. This implies that there is a statistically significant difference in mean R across the levels of Lightness.

#2.2 Are there differences in continuous variable distributions and continuous variable summary statistics based on the binary outcome?

summary(df$response)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##     6.0    26.0    51.0    48.6    72.0    87.0
# Summary statistics for 'response' based on 'outcome'
summary(df$response[df$outcome == 1])  # Replace 1 with the actual code for the positive outcome
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##    6.00   17.00   46.00   45.22   70.00   85.00
summary(df$response[df$outcome == 0])  # Replace 0 with the actual code for the negative outcome
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##    6.00   27.00   52.50   49.61   73.00   87.00

The first summary describes the overall distribution of the continuous variable, while the second pair of summaries shows how that distribution differs between the two groups defined by the binary outcome. Comparing them reveals whether the outcome is associated with shifts in the response distribution.

#3.Visualize the relationships between the continuous inputs, are they correlated?

# Fit a linear model with all interactions among the continuous inputs,
# then visualize the coefficient estimates with confidence intervals.
mod01 <- lm( response ~ R*G*B*Hue, data = df )
mod01%>% coefplot::coefplot()+
  theme(legend.position = 'none')

As the coefficient plot above shows, none of the features are statistically significant except Hue, because Hue's confidence interval is the only one that does not contain zero.

# Visualize relationships between the continuous inputs with a scatterplot matrix
pairs(df[, c("R", "G", "B", "Hue")], pch = 16, col = "blue", main = "Scatterplot Matrix")

# Compute the correlation matrix
cor_matrix <- cor(df[, c("R", "G", "B", "Hue")])

# Visualize the correlation matrix.
# NOTE: `annot` and `cmap` are seaborn (Python) arguments, not valid for base
# R's heatmap() -- they were silently ignored with warnings. symm = TRUE tells
# heatmap() the matrix is symmetric, which is appropriate for correlations.
heatmap(cor_matrix, symm = TRUE, main = "Correlation Matrix")

4. Visualize the relationships between the continuous outputs (response and the LOGIT-transformed response, y) with respect to the continuous INPUTS. 4.1 Can you identify any clear trends? Do the trends depend on the categorical INPUTS?

# LOGIT-transform the response: response is bounded on (0, 100), so
# (response - 0) / (100 - 0) rescales it to (0, 1) before boot::logit()
# maps it to an unbounded scale suitable for linear modeling.
# Keep only the inputs and the transformed response y.
dfii <- df %>% 
  mutate(y = boot::logit( (response - 0) / (100 - 0) ) ) %>% 
  select(R, G, B, 
         Lightness, Saturation, Hue,
         y)

dfii %>% glimpse()
## Rows: 835
## Columns: 7
## $ R          <dbl> 172, 26, 172, 28, 170, 175, 90, 194, 171, 122, 0, 88, 144, …
## $ G          <dbl> 58, 88, 94, 87, 66, 89, 78, 106, 68, 151, 121, 140, 82, 163…
## $ B          <dbl> 62, 151, 58, 152, 58, 65, 136, 53, 107, 59, 88, 58, 132, 50…
## $ Lightness  <chr> "dark", "dark", "dark", "dark", "dark", "dark", "dark", "da…
## $ Saturation <chr> "bright", "bright", "bright", "bright", "bright", "bright",…
## $ Hue        <dbl> 4, 31, 8, 32, 5, 6, 34, 10, 1, 21, 24, 22, 36, 16, 26, 12, …
## $ y          <dbl> -1.9924302, -2.1972246, -1.6582281, -2.1972246, -2.0907411,…
# Refit the all-interaction continuous-input model on the logit-transformed
# response and visualize the coefficient estimates.
mod02 <- lm( y ~ R*G*B*Hue, data = dfii )
mod02 %>% coefplot::coefplot()+
  theme(legend.position = 'none')

5.How can you visualize the behavior of the binary outcome with respect to the continuous inputs? How can you visualize the behavior of the binary outcome with respect to the categorical INPUTS?

library(reshape2)

# Use melt() to reshape R, G, B into a single long-format value column,
# keyed by the binary outcome
df_melted <- melt(df, id.vars = c("outcome"), measure.vars = c("R", "G", "B"))

# Boxplots of each color channel grouped by outcome.
# outcome is numeric (0/1), so convert it to a factor to get discrete
# boxplot groups on the x axis instead of a continuous-x warning.
ggplot(df_melted, aes(x = factor(outcome), y = value, fill = variable)) +
  geom_boxplot() +
  labs(title = "Boxplot of R, G, B Grouped by outcome", x = "outcome") +
  theme(axis.text.x = element_text(angle = 45, hjust = 1))

# Create a boxplot for the binary outcome with respect to Lightness and Saturation
ggplot(df, aes(x = Lightness, y = response, fill = factor(outcome))) +
  geom_boxplot() +
  labs(title = "Boxplot of Response with Respect to Lightness and Outcome", x = "Lightness", y = "Response", fill = "Outcome") +
  facet_wrap(~ Saturation, scales = "free", ncol = 2)  # Facet by Saturation

Part ii: Regression - iiA) Linear models

Before using more advanced methods, you need to develop a baseline understanding for the behavior of the LOGIT-transformed response as a function of the inputs using linear modeling techniques.

df_standard <- dfii
# Standardization function
# Z-score transform: center to zero mean, scale to unit standard deviation.
# The last expression is returned implicitly (tidyverse style).
standardize <- function(x) {
  (x - mean(x)) / sd(x)
}

# Apply the function to the variables
df_standard$R <- standardize(dfii$R)
df_standard$G <- standardize(dfii$G)
df_standard$B <- standardize(dfii$B)
df_standard$Hue <- standardize(dfii$Hue)
df_standard$y <- standardize(dfii$y)

df_standard %>% glimpse()
## Rows: 835
## Columns: 7
## $ R          <dbl> -0.19790120, -2.74419521, -0.19790120, -2.70931447, -0.2327…
## $ G          <dbl> -2.3619736, -1.7631189, -1.6433480, -1.7830807, -2.2022790,…
## $ B          <dbl> -1.7994266, -0.1706872, -1.8726283, -0.1523868, -1.8726283,…
## $ Lightness  <chr> "dark", "dark", "dark", "dark", "dark", "dark", "dark", "da…
## $ Saturation <chr> "bright", "bright", "bright", "bright", "bright", "bright",…
## $ Hue        <dbl> -1.3548215, 1.3198239, -0.9585777, 1.4188848, -1.2557605, -…
## $ y          <dbl> -1.5899718, -1.7628901, -1.3077880, -1.7628901, -1.6729807,…

Use lm() to fit linear models. You must use the following:

#A1. Intercept-only model – no INPUTS!

# A1: intercept-only baseline on the (unstandardized) logit response.
# The intercept estimate equals the sample mean of y.
data <- data.frame(logit_response = dfii$y)
intercept_only_model <- lm(logit_response ~ 1, data = data)
summary(intercept_only_model)
## 
## Call:
## lm(formula = logit_response ~ 1, data = data)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.6422 -0.9366  0.1494  1.0538  2.0103 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)   
## (Intercept) -0.10936    0.04099  -2.668  0.00777 **
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 1.184 on 834 degrees of freedom
fit_lm_01 <- lm(y ~ 1, data =df_standard)

fit_lm_01 %>% summary()
## 
## Call:
## lm(formula = y ~ 1, data = df_standard)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.2309 -0.7908  0.1261  0.8898  1.6974 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)
## (Intercept) -2.305e-17  3.461e-02       0        1
## 
## Residual standard error: 1 on 834 degrees of freedom

#A2. Categorical variables only – linear additive

fit_lm_02 <- lm(y ~ Lightness + Saturation, data = df_standard)

fit_lm_02 %>% summary()
## 
## Call:
## lm(formula = y ~ Lightness + Saturation, data = df_standard)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.90968 -0.22814 -0.01335  0.19000  1.35945 
## 
## Coefficients:
##                    Estimate Std. Error t value Pr(>|t|)    
## (Intercept)        -1.30251    0.04224 -30.837  < 2e-16 ***
## Lightnessdeep       0.46151    0.04455  10.360  < 2e-16 ***
## Lightnesslight      2.35306    0.04446  52.931  < 2e-16 ***
## Lightnessmidtone    1.51244    0.04455  33.952  < 2e-16 ***
## Lightnesspale       2.66009    0.04437  59.954  < 2e-16 ***
## Lightnesssaturated  1.01152    0.04455  22.707  < 2e-16 ***
## Lightnesssoft       1.98898    0.04446  44.741  < 2e-16 ***
## Saturationgray     -0.47209    0.04837  -9.759  < 2e-16 ***
## Saturationmuted    -0.15178    0.04310  -3.521 0.000453 ***
## Saturationneutral  -0.27411    0.04346  -6.308 4.62e-10 ***
## Saturationpure      0.34271    0.04310   7.951 6.12e-15 ***
## Saturationshaded   -0.25504    0.04310  -5.917 4.81e-09 ***
## Saturationsubdued  -0.23723    0.04310  -5.504 4.97e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.3421 on 822 degrees of freedom
## Multiple R-squared:  0.8846, Adjusted R-squared:  0.8829 
## F-statistic: 525.3 on 12 and 822 DF,  p-value: < 2.2e-16
library(coefplot)
coefplot(fit_lm_02)

#A3. Continuous variables only – linear additive

# A3: continuous inputs only, linear additive (no interactions)
fit_lm_03 <- lm(y ~ R + G + B + Hue, data = df_standard)
coefplot(fit_lm_03)

#A4.All categorical and continuous variables – linear additive

# A4: all categorical and continuous inputs, linear additive
# (y ~ . uses every other column of df_standard as a main effect)
fit_lm_04 <- lm(y ~ ., data = df_standard)
coefplot(fit_lm_04)

#A5. Interaction of the categorical inputs with all continuous inputs main effects

# A5: each categorical input interacted with each continuous main effect
fit_lm_05 <- lm(y ~ (Lightness + Saturation) * (R + G + B + Hue), data = df_standard)
coefplot(fit_lm_05)

#A6. Add categorical inputs to all main effect and all pairwise interactions of continuous inputs

fit_lm_06 <- lm(y ~ Lightness + Saturation + (R + G + B + Hue)^2, data = df_standard)

fit_lm_06 %>% summary()
## 
## Call:
## lm(formula = y ~ Lightness + Saturation + (R + G + B + Hue)^2, 
##     data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.236008 -0.038741 -0.003913  0.035839  0.241332 
## 
## Coefficients:
##                     Estimate Std. Error t value Pr(>|t|)    
## (Intercept)        -0.030689   0.018823  -1.630 0.103407    
## Lightnessdeep       0.043867   0.010872   4.035 5.98e-05 ***
## Lightnesslight     -0.067735   0.025544  -2.652 0.008166 ** 
## Lightnessmidtone   -0.085469   0.019500  -4.383 1.32e-05 ***
## Lightnesspale       0.036528   0.027182   1.344 0.179377    
## Lightnesssaturated -0.007675   0.015221  -0.504 0.614230    
## Lightnesssoft      -0.099784   0.022806  -4.375 1.37e-05 ***
## Saturationgray     -0.035763   0.011384  -3.142 0.001741 ** 
## Saturationmuted    -0.017965   0.008552  -2.101 0.035971 *  
## Saturationneutral  -0.044623   0.009441  -4.727 2.69e-06 ***
## Saturationpure      0.025348   0.008919   2.842 0.004597 ** 
## Saturationshaded   -0.034739   0.009117  -3.810 0.000149 ***
## Saturationsubdued  -0.038154   0.008793  -4.339 1.61e-05 ***
## R                   0.232483   0.006785  34.264  < 2e-16 ***
## G                   0.749420   0.009083  82.512  < 2e-16 ***
## B                   0.135239   0.007819  17.296  < 2e-16 ***
## Hue                -0.004240   0.005137  -0.825 0.409434    
## R:G                 0.028167   0.004905   5.742 1.32e-08 ***
## R:B                -0.010631   0.007314  -1.453 0.146487    
## R:Hue              -0.041204   0.007204  -5.720 1.50e-08 ***
## G:B                 0.053081   0.006202   8.559  < 2e-16 ***
## G:Hue               0.027295   0.008677   3.146 0.001717 ** 
## B:Hue               0.007130   0.010809   0.660 0.509713    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.06666 on 812 degrees of freedom
## Multiple R-squared:  0.9957, Adjusted R-squared:  0.9956 
## F-statistic:  8494 on 22 and 812 DF,  p-value: < 2.2e-16
coefplot(fit_lm_06)

#A7. Interaction of the categorical inputs with all main effect and all pairwise interactions of continuous inputs

# A7: categorical inputs interacted with all continuous main effects and
# all pairwise continuous interactions ((...)^2 expands to mains + pairs)
fit_lm_07 <- lm(y ~ (Lightness + Saturation) * (R + G + B + Hue)^2, data = df_standard)
coefplot(fit_lm_07)

#A8. 3 models with basis functions of your choice. Try non-linear basis functions based on your EDA.

fit_lm_08 <- lm(y ~ (Lightness + Saturation) * (( R + G + B + Hue)^2 + I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), data = df_standard)
fit_lm_08 %>% summary()
## 
## Call:
## lm(formula = y ~ (Lightness + Saturation) * ((R + G + B + Hue)^2 + 
##     I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.125500 -0.015352 -0.000605  0.015508  0.133380 
## 
## Coefficients:
##                               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)                 -5.740e-01  1.229e-01  -4.669 3.68e-06 ***
## Lightnessdeep                4.347e-01  1.180e-01   3.683 0.000250 ***
## Lightnesslight               5.684e-01  1.325e-01   4.289 2.07e-05 ***
## Lightnessmidtone             4.396e-01  1.242e-01   3.539 0.000430 ***
## Lightnesspale                1.024e+00  1.769e-01   5.790 1.10e-08 ***
## Lightnesssaturated           4.619e-01  1.207e-01   3.827 0.000142 ***
## Lightnesssoft                4.665e-01  1.248e-01   3.738 0.000202 ***
## Saturationgray              -1.993e-02  3.240e-02  -0.615 0.538765    
## Saturationmuted             -1.097e-02  3.098e-02  -0.354 0.723496    
## Saturationneutral           -2.988e-03  3.065e-02  -0.098 0.922355    
## Saturationpure              -5.557e-02  4.475e-02  -1.242 0.214840    
## Saturationshaded            -6.139e-04  3.087e-02  -0.020 0.984140    
## Saturationsubdued           -1.784e-02  3.093e-02  -0.577 0.564358    
## R                            1.631e-01  4.856e-02   3.359 0.000829 ***
## G                            4.170e-01  5.547e-02   7.517 1.90e-13 ***
## B                           -4.034e-01  1.302e-01  -3.097 0.002038 ** 
## Hue                          1.331e-01  6.460e-02   2.060 0.039763 *  
## I(R^2)                       4.656e-02  9.126e-03   5.102 4.44e-07 ***
## I(G^2)                       2.743e-02  2.873e-02   0.955 0.340111    
## I(B^2)                      -1.243e-01  3.887e-02  -3.197 0.001456 ** 
## I(Hue^2)                     2.758e-03  1.805e-02   0.153 0.878584    
## R:G                         -8.968e-02  3.068e-02  -2.923 0.003584 ** 
## R:B                         -2.549e-02  2.682e-02  -0.950 0.342234    
## R:Hue                        2.807e-02  3.164e-02   0.887 0.375308    
## G:B                         -1.531e-01  4.267e-02  -3.589 0.000358 ***
## G:Hue                        3.767e-02  3.407e-02   1.105 0.269365    
## B:Hue                        8.085e-02  3.987e-02   2.028 0.043005 *  
## Lightnessdeep:R             -7.683e-03  4.321e-02  -0.178 0.858925    
## Lightnesslight:R             1.451e-01  1.371e-01   1.058 0.290250    
## Lightnessmidtone:R           5.111e-02  4.755e-02   1.075 0.282878    
## Lightnesspale:R             -3.972e-01  3.646e-01  -1.089 0.276491    
## Lightnesssaturated:R         6.853e-02  4.376e-02   1.566 0.117829    
## Lightnesssoft:R              5.585e-02  6.214e-02   0.899 0.369108    
## Lightnessdeep:G              2.248e-01  5.025e-02   4.473 9.12e-06 ***
## Lightnesslight:G            -2.549e-01  1.660e-01  -1.536 0.125121    
## Lightnessmidtone:G           2.251e-01  5.900e-02   3.815 0.000150 ***
## Lightnesspale:G             -8.384e-01  4.563e-01  -1.837 0.066623 .  
## Lightnesssaturated:G         2.284e-01  5.160e-02   4.427 1.13e-05 ***
## Lightnesssoft:G              7.211e-02  8.030e-02   0.898 0.369491    
## Lightnessdeep:B              5.033e-01  1.260e-01   3.996 7.18e-05 ***
## Lightnesslight:B             4.837e-01  1.612e-01   3.001 0.002799 ** 
## Lightnessmidtone:B           5.347e-01  1.315e-01   4.067 5.36e-05 ***
## Lightnesspale:B              4.947e-01  2.378e-01   2.080 0.037898 *  
## Lightnesssaturated:B         5.630e-01  1.260e-01   4.468 9.34e-06 ***
## Lightnesssoft:B              5.238e-01  1.354e-01   3.870 0.000120 ***
## Lightnessdeep:Hue           -2.048e-01  6.599e-02  -3.104 0.001994 ** 
## Lightnesslight:Hue          -1.332e-01  7.253e-02  -1.837 0.066663 .  
## Lightnessmidtone:Hue        -1.589e-01  6.363e-02  -2.497 0.012772 *  
## Lightnesspale:Hue           -1.625e-01  8.888e-02  -1.828 0.068054 .  
## Lightnesssaturated:Hue      -1.444e-01  6.403e-02  -2.255 0.024445 *  
## Lightnesssoft:Hue           -1.594e-01  6.563e-02  -2.429 0.015411 *  
## Lightnessdeep:I(R^2)        -9.327e-03  8.874e-03  -1.051 0.293647    
## Lightnesslight:I(R^2)       -7.219e-02  7.377e-02  -0.979 0.328193    
## Lightnessmidtone:I(R^2)      4.760e-03  1.779e-02   0.268 0.789132    
## Lightnesspale:I(R^2)         2.988e-01  2.231e-01   1.339 0.180897    
## Lightnesssaturated:I(R^2)    1.755e-03  9.818e-03   0.179 0.858179    
## Lightnesssoft:I(R^2)         1.044e-02  3.702e-02   0.282 0.778111    
## Lightnessdeep:I(G^2)         6.690e-02  2.053e-02   3.258 0.001180 ** 
## Lightnesslight:I(G^2)        3.432e-01  1.293e-01   2.654 0.008158 ** 
## Lightnessmidtone:I(G^2)      9.108e-02  4.472e-02   2.037 0.042087 *  
## Lightnesspale:I(G^2)         2.753e-01  3.361e-01   0.819 0.413091    
## Lightnesssaturated:I(G^2)    6.074e-02  2.885e-02   2.105 0.035667 *  
## Lightnesssoft:I(G^2)         2.548e-01  7.069e-02   3.605 0.000337 ***
## Lightnessdeep:I(B^2)         1.410e-01  3.725e-02   3.785 0.000168 ***
## Lightnesslight:I(B^2)        2.097e-01  6.587e-02   3.184 0.001523 ** 
## Lightnessmidtone:I(B^2)      1.675e-01  3.893e-02   4.304 1.94e-05 ***
## Lightnesspale:I(B^2)         2.031e-01  1.088e-01   1.867 0.062397 .  
## Lightnesssaturated:I(B^2)    1.667e-01  3.840e-02   4.341 1.65e-05 ***
## Lightnesssoft:I(B^2)         2.116e-01  4.521e-02   4.681 3.49e-06 ***
## Lightnessdeep:I(Hue^2)      -3.804e-03  1.196e-02  -0.318 0.750587    
## Lightnesslight:I(Hue^2)     -4.958e-03  1.680e-02  -0.295 0.768033    
## Lightnessmidtone:I(Hue^2)   -6.729e-03  1.573e-02  -0.428 0.668964    
## Lightnesspale:I(Hue^2)      -4.647e-03  1.597e-02  -0.291 0.771202    
## Lightnesssaturated:I(Hue^2) -1.369e-02  1.332e-02  -1.028 0.304457    
## Lightnesssoft:I(Hue^2)      -1.799e-02  1.718e-02  -1.047 0.295356    
## Saturationgray:R            -7.726e-03  1.336e-01  -0.058 0.953893    
## Saturationmuted:R            1.801e-03  2.805e-02   0.064 0.948832    
## Saturationneutral:R          6.425e-02  5.264e-02   1.221 0.222646    
## Saturationpure:R             3.889e-02  3.024e-02   1.286 0.198924    
## Saturationshaded:R          -8.517e-03  3.989e-02  -0.214 0.830987    
## Saturationsubdued:R          2.550e-02  3.297e-02   0.773 0.439627    
## Saturationgray:G            -1.178e-01  1.631e-01  -0.723 0.470079    
## Saturationmuted:G            6.606e-04  4.410e-02   0.015 0.988053    
## Saturationneutral:G         -6.066e-02  8.990e-02  -0.675 0.500036    
## Saturationpure:G             4.224e-02  3.922e-02   1.077 0.281977    
## Saturationshaded:G           3.322e-02  6.555e-02   0.507 0.612488    
## Saturationsubdued:G         -4.625e-04  5.330e-02  -0.009 0.993079    
## Saturationgray:B             1.423e-01  1.037e-01   1.372 0.170585    
## Saturationmuted:B            2.640e-02  3.160e-02   0.835 0.403833    
## Saturationneutral:B          1.006e-02  5.894e-02   0.171 0.864533    
## Saturationpure:B            -3.860e-02  2.836e-02  -1.361 0.174060    
## Saturationshaded:B          -2.382e-02  4.371e-02  -0.545 0.585942    
## Saturationsubdued:B          7.099e-04  3.715e-02   0.019 0.984760    
## Saturationgray:Hue           3.804e-05  3.719e-02   0.001 0.999184    
## Saturationmuted:Hue          2.625e-04  2.964e-02   0.009 0.992935    
## Saturationneutral:Hue        4.235e-02  3.060e-02   1.384 0.166833    
## Saturationpure:Hue           3.863e-02  3.700e-02   1.044 0.296781    
## Saturationshaded:Hue         2.487e-02  3.019e-02   0.824 0.410388    
## Saturationsubdued:Hue        1.873e-02  3.018e-02   0.621 0.535115    
## Saturationgray:I(R^2)        3.507e-01  4.509e-01   0.778 0.437004    
## Saturationmuted:I(R^2)       4.412e-02  1.405e-02   3.140 0.001768 ** 
## Saturationneutral:I(R^2)    -9.845e-02  1.477e-01  -0.667 0.505320    
## Saturationpure:I(R^2)        8.689e-03  1.047e-02   0.830 0.406918    
## Saturationshaded:I(R^2)     -9.684e-02  4.802e-02  -2.016 0.044166 *  
## Saturationsubdued:I(R^2)     4.059e-02  2.604e-02   1.559 0.119496    
## Saturationgray:I(G^2)        5.604e-02  6.991e-01   0.080 0.936140    
## Saturationmuted:I(G^2)       5.587e-02  3.474e-02   1.608 0.108240    
## Saturationneutral:I(G^2)    -4.113e-01  2.176e-01  -1.890 0.059155 .  
## Saturationpure:I(G^2)        1.592e-02  2.779e-02   0.573 0.566951    
## Saturationshaded:I(G^2)     -6.115e-02  9.074e-02  -0.674 0.500651    
## Saturationsubdued:I(G^2)    -1.946e-02  4.496e-02  -0.433 0.665365    
## Saturationgray:I(B^2)       -2.276e-01  2.326e-01  -0.979 0.328175    
## Saturationmuted:I(B^2)       5.149e-02  2.271e-02   2.267 0.023712 *  
## Saturationneutral:I(B^2)    -1.073e-01  7.824e-02  -1.371 0.170714    
## Saturationpure:I(B^2)       -1.393e-02  1.544e-02  -0.902 0.367315    
## Saturationshaded:I(B^2)      4.691e-02  4.523e-02   1.037 0.300067    
## Saturationsubdued:I(B^2)     8.180e-03  2.649e-02   0.309 0.757579    
## Saturationgray:I(Hue^2)     -1.191e-03  1.931e-02  -0.062 0.950860    
## Saturationmuted:I(Hue^2)    -2.269e-02  1.532e-02  -1.481 0.139129    
## Saturationneutral:I(Hue^2)   1.765e-02  1.625e-02   1.086 0.277735    
## Saturationpure:I(Hue^2)      1.881e-02  1.399e-02   1.344 0.179297    
## Saturationshaded:I(Hue^2)    1.850e-03  1.498e-02   0.124 0.901716    
## Saturationsubdued:I(Hue^2)  -1.052e-02  1.565e-02  -0.672 0.501866    
## Lightnessdeep:R:G            5.004e-03  1.886e-02   0.265 0.790829    
## Lightnesslight:R:G           2.533e-01  1.325e-01   1.912 0.056313 .  
## Lightnessmidtone:R:G         1.229e-01  4.718e-02   2.605 0.009400 ** 
## Lightnesspale:R:G            6.957e-01  3.645e-01   1.909 0.056737 .  
## Lightnesssaturated:R:G       7.351e-02  2.793e-02   2.632 0.008697 ** 
## Lightnesssoft:R:G            1.012e-01  7.566e-02   1.337 0.181688    
## Lightnessdeep:R:B           -1.165e-02  2.386e-02  -0.488 0.625555    
## Lightnesslight:R:B          -6.598e-02  1.026e-01  -0.643 0.520627    
## Lightnessmidtone:R:B         4.224e-03  3.560e-02   0.119 0.905599    
## Lightnesspale:R:B           -4.629e-01  2.799e-01  -1.654 0.098693 .  
## Lightnesssaturated:R:B       4.583e-02  2.789e-02   1.643 0.100854    
## Lightnesssoft:R:B            8.404e-03  5.500e-02   0.153 0.878610    
## Lightnessdeep:R:Hue         -3.360e-03  1.729e-02  -0.194 0.845981    
## Lightnesslight:R:Hue         2.510e-03  4.470e-02   0.056 0.955240    
## Lightnessmidtone:R:Hue      -6.270e-03  2.600e-02  -0.241 0.809552    
## Lightnesspale:R:Hue          1.245e-01  7.858e-02   1.585 0.113513    
## Lightnesssaturated:R:Hue    -1.019e-02  2.032e-02  -0.501 0.616402    
## Lightnesssoft:R:Hue         -1.562e-03  3.446e-02  -0.045 0.963867    
## Lightnessdeep:G:B            1.105e-01  3.088e-02   3.577 0.000373 ***
## Lightnesslight:G:B           2.217e-01  1.222e-01   1.815 0.070063 .  
## Lightnessmidtone:G:B         1.522e-01  4.758e-02   3.198 0.001450 ** 
## Lightnesspale:G:B            5.919e-01  3.194e-01   1.853 0.064293 .  
## Lightnesssaturated:G:B       1.181e-01  3.608e-02   3.272 0.001127 ** 
## Lightnesssoft:G:B            1.260e-01  7.096e-02   1.776 0.076212 .  
## Lightnessdeep:G:Hue         -8.602e-02  3.038e-02  -2.831 0.004787 ** 
## Lightnesslight:G:Hue        -3.120e-03  5.601e-02  -0.056 0.955598    
## Lightnessmidtone:G:Hue      -1.028e-02  4.171e-02  -0.247 0.805323    
## Lightnesspale:G:Hue         -4.457e-02  7.640e-02  -0.583 0.559832    
## Lightnesssaturated:G:Hue    -2.276e-02  3.461e-02  -0.658 0.511073    
## Lightnesssoft:G:Hue         -4.206e-02  4.633e-02  -0.908 0.364315    
## Lightnessdeep:B:Hue         -8.372e-02  2.929e-02  -2.858 0.004397 ** 
## Lightnesslight:B:Hue        -1.382e-01  4.860e-02  -2.844 0.004604 ** 
## Lightnessmidtone:B:Hue      -8.364e-02  3.525e-02  -2.373 0.017953 *  
## Lightnesspale:B:Hue         -1.631e-01  6.742e-02  -2.418 0.015863 *  
## Lightnesssaturated:B:Hue    -5.887e-02  3.048e-02  -1.931 0.053880 .  
## Lightnesssoft:B:Hue         -8.644e-02  4.071e-02  -2.123 0.034103 *  
## Saturationgray:R:G          -7.067e-01  1.068e+00  -0.662 0.508266    
## Saturationmuted:R:G         -1.153e-01  3.770e-02  -3.059 0.002316 ** 
## Saturationneutral:R:G        3.978e-01  3.232e-01   1.231 0.218819    
## Saturationpure:R:G           3.260e-06  3.163e-02   0.000 0.999918    
## Saturationshaded:R:G         1.796e-01  1.030e-01   1.743 0.081838 .  
## Saturationsubdued:R:G       -7.833e-02  5.576e-02  -1.405 0.160537    
## Saturationgray:R:B           2.204e-01  4.825e-01   0.457 0.647921    
## Saturationmuted:R:B          5.159e-02  3.079e-02   1.676 0.094308 .  
## Saturationneutral:R:B       -2.422e-01  1.320e-01  -1.836 0.066895 .  
## Saturationpure:R:B          -1.737e-02  2.142e-02  -0.811 0.417748    
## Saturationshaded:R:B        -4.993e-02  7.533e-02  -0.663 0.507664    
## Saturationsubdued:R:B        1.566e-02  3.860e-02   0.406 0.684978    
## Saturationgray:R:Hue         5.358e-02  1.348e-01   0.397 0.691182    
## Saturationmuted:R:Hue       -8.457e-03  3.214e-02  -0.263 0.792529    
## Saturationneutral:R:Hue      3.524e-02  7.918e-02   0.445 0.656472    
## Saturationpure:R:Hue        -2.235e-02  3.801e-02  -0.588 0.556769    
## Saturationshaded:R:Hue      -4.133e-02  4.705e-02  -0.878 0.380105    
## Saturationsubdued:R:Hue     -2.503e-02  3.787e-02  -0.661 0.508859    
## Saturationgray:G:B           3.230e-01  6.893e-01   0.469 0.639517    
## Saturationmuted:G:B         -7.810e-02  5.265e-02  -1.483 0.138482    
## Saturationneutral:G:B        4.554e-01  2.291e-01   1.987 0.047300 *  
## Saturationpure:G:B           1.353e-02  3.454e-02   0.392 0.695318    
## Saturationshaded:G:B        -3.276e-02  1.269e-01  -0.258 0.796315    
## Saturationsubdued:G:B        4.067e-02  6.306e-02   0.645 0.519252    
## Saturationgray:G:Hue        -1.215e-01  1.564e-01  -0.777 0.437700    
## Saturationmuted:G:Hue       -1.850e-02  2.909e-02  -0.636 0.525012    
## Saturationneutral:G:Hue     -1.792e-02  7.618e-02  -0.235 0.814089    
## Saturationpure:G:Hue         9.135e-03  2.725e-02   0.335 0.737552    
## Saturationshaded:G:Hue       7.314e-02  4.226e-02   1.731 0.083967 .  
## Saturationsubdued:G:Hue     -3.253e-02  3.569e-02  -0.911 0.362429    
## Saturationgray:B:Hue         7.069e-02  9.546e-02   0.741 0.459213    
## Saturationmuted:B:Hue        1.980e-02  3.904e-02   0.507 0.612201    
## Saturationneutral:B:Hue     -4.266e-02  5.169e-02  -0.825 0.409520    
## Saturationpure:B:Hue        -7.908e-03  3.794e-02  -0.208 0.834956    
## Saturationshaded:B:Hue      -5.420e-02  4.629e-02  -1.171 0.242119    
## Saturationsubdued:B:Hue      2.367e-02  4.151e-02   0.570 0.568665    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.03175 on 640 degrees of freedom
## Multiple R-squared:  0.9992, Adjusted R-squared:  0.999 
## F-statistic:  4260 on 194 and 640 DF,  p-value: < 2.2e-16

#A9. Can consider interactions of basis functions with other basis functions!

# Model 9: additive categorical effects (Lightness, Saturation) plus all
# interactions among the continuous inputs R, G, B, Hue up to 4th order,
# with a quadratic basis term for each continuous input.
fit_lm_09 <- lm(y ~ Lightness + Saturation + R*G*B*Hue + I(R^2) + I(G^2) + I(B^2) + I(Hue^2), data = df_standard)
fit_lm_09 %>% summary()
## 
## Call:
## lm(formula = y ~ Lightness + Saturation + R * G * B * Hue + I(R^2) + 
##     I(G^2) + I(B^2) + I(Hue^2), data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.197582 -0.027025 -0.002384  0.026201  0.250074 
## 
## Coefficients:
##                      Estimate Std. Error t value Pr(>|t|)    
## (Intercept)        -0.1657261  0.0174878  -9.477  < 2e-16 ***
## Lightnessdeep       0.0353333  0.0083990   4.207 2.88e-05 ***
## Lightnesslight      0.0058840  0.0192118   0.306 0.759480    
## Lightnessmidtone    0.0456371  0.0161270   2.830 0.004773 ** 
## Lightnesspale       0.0573979  0.0199970   2.870 0.004208 ** 
## Lightnesssaturated  0.0502504  0.0125656   3.999 6.95e-05 ***
## Lightnesssoft       0.0190397  0.0179929   1.058 0.290291    
## Saturationgray      0.0054304  0.0082281   0.660 0.509455    
## Saturationmuted     0.0081681  0.0059433   1.374 0.169723    
## Saturationneutral  -0.0018902  0.0068573  -0.276 0.782886    
## Saturationpure      0.0217553  0.0063061   3.450 0.000590 ***
## Saturationshaded    0.0036000  0.0065442   0.550 0.582405    
## Saturationsubdued   0.0007582  0.0062741   0.121 0.903848    
## R                   0.2396113  0.0073988  32.385  < 2e-16 ***
## G                   0.6455037  0.0106439  60.645  < 2e-16 ***
## B                   0.1285391  0.0097490  13.185  < 2e-16 ***
## Hue                 0.0129430  0.0057774   2.240 0.025346 *  
## I(R^2)              0.0490690  0.0042808  11.463  < 2e-16 ***
## I(G^2)              0.0957508  0.0078753  12.158  < 2e-16 ***
## I(B^2)              0.0396873  0.0059323   6.690 4.18e-11 ***
## I(Hue^2)           -0.0162166  0.0033058  -4.906 1.13e-06 ***
## R:G                 0.0177352  0.0085847   2.066 0.039157 *  
## R:B                -0.0051333  0.0073410  -0.699 0.484585    
## G:B                -0.0123909  0.0100415  -1.234 0.217575    
## R:Hue               0.0076914  0.0086092   0.893 0.371914    
## G:Hue               0.0245189  0.0071463   3.431 0.000632 ***
## B:Hue              -0.0318680  0.0096608  -3.299 0.001014 ** 
## R:G:B               0.0994143  0.0043243  22.990  < 2e-16 ***
## R:G:Hue            -0.0476393  0.0067120  -7.098 2.79e-12 ***
## R:B:Hue             0.0116556  0.0082037   1.421 0.155770    
## G:B:Hue             0.0313149  0.0051365   6.097 1.68e-09 ***
## R:G:B:Hue          -0.0055666  0.0032905  -1.692 0.091092 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.04481 on 803 degrees of freedom
## Multiple R-squared:  0.9981, Adjusted R-squared:  0.998 
## F-statistic: 1.338e+04 on 31 and 803 DF,  p-value: < 2.2e-16

#A10. Can consider interactions of basis functions with the categorical inputs!

fit_lm_10 <- lm(y ~ (Lightness + Saturation) * R*G*B*Hue, data = df_standard)

##save model

# Save each fitted model to its own RDS file (mod01.rds ... mod10.rds).
# walk2() pairs the i-th model with the i-th path and calls
# readr::write_rds(model, path) for its side effect only.
mod_fits <- list(fit_lm_01, fit_lm_02, fit_lm_03, fit_lm_04, fit_lm_05,
                 fit_lm_06, fit_lm_07, fit_lm_08, fit_lm_09, fit_lm_10)
purrr::walk2(mod_fits, sprintf("mod%02d.rds", 1:10), readr::write_rds)

##reload model

# Read each saved model back from disk. The individual re_load_mod* bindings
# are used below for metric extraction, coefficient plots, and the Bayesian
# design matrices.
re_load_mod01 <- readr::read_rds("mod01.rds")
re_load_mod02 <- readr::read_rds("mod02.rds")
re_load_mod03 <- readr::read_rds("mod03.rds")
re_load_mod04 <- readr::read_rds("mod04.rds")
re_load_mod05 <- readr::read_rds("mod05.rds")
re_load_mod06 <- readr::read_rds("mod06.rds")
re_load_mod07 <- readr::read_rds("mod07.rds")
re_load_mod08 <- readr::read_rds("mod08.rds")
re_load_mod09 <- readr::read_rds("mod09.rds")
re_load_mod10 <- readr::read_rds("mod10.rds")

Part ii: Regression - iiA) Linear models

#1. Which of the 10 models is the best? What performance metric did you use to make your selection?

# Return the one-row broom::glance() summary of a fitted model, tagged with
# a model identifier so rows from several models can be stacked and compared.
extract_metrics <- function(mod, mod_name) {
  mod %>%
    broom::glance() %>%
    mutate(mod_name = mod_name)
}

# Collect the glance() metrics of all 10 reloaded models into one tibble.
# Build the object names programmatically instead of hand-writing the
# 10-element list, so the model count lives in one place.
mod_ids <- sprintf("%02d", 1:10)
all_metrics <- purrr::map2_dfr(mget(paste0("re_load_mod", mod_ids)),
                               mod_ids,
                               extract_metrics)
all_metrics %>% glimpse()
## Rows: 10
## Columns: 13
## $ r.squared     <dbl> 0.0000000, 0.8846326, 0.9881038, 0.9945082, 0.9978083, 0…
## $ adj.r.squared <dbl> 0.0000000, 0.8829484, 0.9880465, 0.9944008, 0.9976262, 0…
## $ sigma         <dbl> 1.00000000, 0.34212801, 0.10933222, 0.07482803, 0.048722…
## $ statistic     <dbl> NA, 525.2554, 17235.0403, 9258.1844, 5477.4752, 8494.316…
## $ p.value       <dbl> NA, 0, 0, 0, 0, 0, 0, 0, 0, 0
## $ df            <dbl> NA, 12, 4, 16, 64, 22, 142, 194, 31, 207
## $ logLik        <dbl> -1184.3134, -282.6663, 665.8529, 988.5639, 1372.0751, 10…
## $ AIC           <dbl> 2372.6268, 593.3327, -1319.7058, -1941.1279, -2612.1502,…
## $ BIC           <dbl> 2382.0816, 659.5167, -1291.3412, -1856.0341, -2300.1397,…
## $ deviance      <dbl> 834.0000000, 96.2163935, 9.9214333, 4.5801732, 1.8278624…
## $ df.residual   <int> 834, 822, 830, 818, 770, 812, 692, 640, 803, 627
## $ nobs          <int> 835, 835, 835, 835, 835, 835, 835, 835, 835, 835
## $ mod_name      <chr> "01", "02", "03", "04", "05", "06", "07", "08", "09", "1…

#2. Visualize the coefficient summaries for your top 3 models.

# Compare the 10 models on AIC and BIC: pivot the two criteria into long
# format so each criterion gets its own facet (free y scale, since their
# magnitudes differ), then plot one point per model.
all_metrics %>% 
  select(mod_name, AIC, BIC) %>% 
  pivot_longer(c(AIC, BIC)) %>% 
  ggplot(mapping = aes(x = mod_name, y = value)) +
  geom_point(size = 2) +
  facet_wrap(~name, scales = 'free_y') + 
  theme(axis.text.x = element_text(angle = 45, hjust = 1))

Based on the information provided, model 8 is selected as the optimal model by the less conservative Akaike Information Criterion (AIC), while the more conservative Bayesian Information Criterion (BIC) indicates that model 9 is preferable. Given that my evaluation criteria prioritize BIC as the performance metric, I conclude that model 9 is the superior model.

#3. How do the coefficient summaries compare between the top 3 models?

Model 9 considers the smallest set of features, whereas model 8 incorporates the broadest range. Both models concur on the significance of certain features associated with G.

• Which inputs seem important? In examining the summaries of models 9, 8, and 5, it appears that the features related to the continuous variable G are of significance.

# Coefficient plots for the top 3 models (9, 8, and 5 per the AIC/BIC plot).
re_load_mod09 %>% coefplot::coefplot()

re_load_mod08 %>% coefplot::coefplot()

re_load_mod05 %>% coefplot::coefplot()

# Full coefficient summary for model 9 (best model by BIC).
re_load_mod09 %>% summary()
## 
## Call:
## lm(formula = y ~ Lightness + Saturation + R * G * B * Hue + I(R^2) + 
##     I(G^2) + I(B^2) + I(Hue^2), data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.197582 -0.027025 -0.002384  0.026201  0.250074 
## 
## Coefficients:
##                      Estimate Std. Error t value Pr(>|t|)    
## (Intercept)        -0.1657261  0.0174878  -9.477  < 2e-16 ***
## Lightnessdeep       0.0353333  0.0083990   4.207 2.88e-05 ***
## Lightnesslight      0.0058840  0.0192118   0.306 0.759480    
## Lightnessmidtone    0.0456371  0.0161270   2.830 0.004773 ** 
## Lightnesspale       0.0573979  0.0199970   2.870 0.004208 ** 
## Lightnesssaturated  0.0502504  0.0125656   3.999 6.95e-05 ***
## Lightnesssoft       0.0190397  0.0179929   1.058 0.290291    
## Saturationgray      0.0054304  0.0082281   0.660 0.509455    
## Saturationmuted     0.0081681  0.0059433   1.374 0.169723    
## Saturationneutral  -0.0018902  0.0068573  -0.276 0.782886    
## Saturationpure      0.0217553  0.0063061   3.450 0.000590 ***
## Saturationshaded    0.0036000  0.0065442   0.550 0.582405    
## Saturationsubdued   0.0007582  0.0062741   0.121 0.903848    
## R                   0.2396113  0.0073988  32.385  < 2e-16 ***
## G                   0.6455037  0.0106439  60.645  < 2e-16 ***
## B                   0.1285391  0.0097490  13.185  < 2e-16 ***
## Hue                 0.0129430  0.0057774   2.240 0.025346 *  
## I(R^2)              0.0490690  0.0042808  11.463  < 2e-16 ***
## I(G^2)              0.0957508  0.0078753  12.158  < 2e-16 ***
## I(B^2)              0.0396873  0.0059323   6.690 4.18e-11 ***
## I(Hue^2)           -0.0162166  0.0033058  -4.906 1.13e-06 ***
## R:G                 0.0177352  0.0085847   2.066 0.039157 *  
## R:B                -0.0051333  0.0073410  -0.699 0.484585    
## G:B                -0.0123909  0.0100415  -1.234 0.217575    
## R:Hue               0.0076914  0.0086092   0.893 0.371914    
## G:Hue               0.0245189  0.0071463   3.431 0.000632 ***
## B:Hue              -0.0318680  0.0096608  -3.299 0.001014 ** 
## R:G:B               0.0994143  0.0043243  22.990  < 2e-16 ***
## R:G:Hue            -0.0476393  0.0067120  -7.098 2.79e-12 ***
## R:B:Hue             0.0116556  0.0082037   1.421 0.155770    
## G:B:Hue             0.0313149  0.0051365   6.097 1.68e-09 ***
## R:G:B:Hue          -0.0055666  0.0032905  -1.692 0.091092 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.04481 on 803 degrees of freedom
## Multiple R-squared:  0.9981, Adjusted R-squared:  0.998 
## F-statistic: 1.338e+04 on 31 and 803 DF,  p-value: < 2.2e-16
re_load_mod08 %>% summary()
## 
## Call:
## lm(formula = y ~ (Lightness + Saturation) * ((R + G + B + Hue)^2 + 
##     I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.125500 -0.015352 -0.000605  0.015508  0.133380 
## 
## Coefficients:
##                               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)                 -5.740e-01  1.229e-01  -4.669 3.68e-06 ***
## Lightnessdeep                4.347e-01  1.180e-01   3.683 0.000250 ***
## Lightnesslight               5.684e-01  1.325e-01   4.289 2.07e-05 ***
## Lightnessmidtone             4.396e-01  1.242e-01   3.539 0.000430 ***
## Lightnesspale                1.024e+00  1.769e-01   5.790 1.10e-08 ***
## Lightnesssaturated           4.619e-01  1.207e-01   3.827 0.000142 ***
## Lightnesssoft                4.665e-01  1.248e-01   3.738 0.000202 ***
## Saturationgray              -1.993e-02  3.240e-02  -0.615 0.538765    
## Saturationmuted             -1.097e-02  3.098e-02  -0.354 0.723496    
## Saturationneutral           -2.988e-03  3.065e-02  -0.098 0.922355    
## Saturationpure              -5.557e-02  4.475e-02  -1.242 0.214840    
## Saturationshaded            -6.139e-04  3.087e-02  -0.020 0.984140    
## Saturationsubdued           -1.784e-02  3.093e-02  -0.577 0.564358    
## R                            1.631e-01  4.856e-02   3.359 0.000829 ***
## G                            4.170e-01  5.547e-02   7.517 1.90e-13 ***
## B                           -4.034e-01  1.302e-01  -3.097 0.002038 ** 
## Hue                          1.331e-01  6.460e-02   2.060 0.039763 *  
## I(R^2)                       4.656e-02  9.126e-03   5.102 4.44e-07 ***
## I(G^2)                       2.743e-02  2.873e-02   0.955 0.340111    
## I(B^2)                      -1.243e-01  3.887e-02  -3.197 0.001456 ** 
## I(Hue^2)                     2.758e-03  1.805e-02   0.153 0.878584    
## R:G                         -8.968e-02  3.068e-02  -2.923 0.003584 ** 
## R:B                         -2.549e-02  2.682e-02  -0.950 0.342234    
## R:Hue                        2.807e-02  3.164e-02   0.887 0.375308    
## G:B                         -1.531e-01  4.267e-02  -3.589 0.000358 ***
## G:Hue                        3.767e-02  3.407e-02   1.105 0.269365    
## B:Hue                        8.085e-02  3.987e-02   2.028 0.043005 *  
## Lightnessdeep:R             -7.683e-03  4.321e-02  -0.178 0.858925    
## Lightnesslight:R             1.451e-01  1.371e-01   1.058 0.290250    
## Lightnessmidtone:R           5.111e-02  4.755e-02   1.075 0.282878    
## Lightnesspale:R             -3.972e-01  3.646e-01  -1.089 0.276491    
## Lightnesssaturated:R         6.853e-02  4.376e-02   1.566 0.117829    
## Lightnesssoft:R              5.585e-02  6.214e-02   0.899 0.369108    
## Lightnessdeep:G              2.248e-01  5.025e-02   4.473 9.12e-06 ***
## Lightnesslight:G            -2.549e-01  1.660e-01  -1.536 0.125121    
## Lightnessmidtone:G           2.251e-01  5.900e-02   3.815 0.000150 ***
## Lightnesspale:G             -8.384e-01  4.563e-01  -1.837 0.066623 .  
## Lightnesssaturated:G         2.284e-01  5.160e-02   4.427 1.13e-05 ***
## Lightnesssoft:G              7.211e-02  8.030e-02   0.898 0.369491    
## Lightnessdeep:B              5.033e-01  1.260e-01   3.996 7.18e-05 ***
## Lightnesslight:B             4.837e-01  1.612e-01   3.001 0.002799 ** 
## Lightnessmidtone:B           5.347e-01  1.315e-01   4.067 5.36e-05 ***
## Lightnesspale:B              4.947e-01  2.378e-01   2.080 0.037898 *  
## Lightnesssaturated:B         5.630e-01  1.260e-01   4.468 9.34e-06 ***
## Lightnesssoft:B              5.238e-01  1.354e-01   3.870 0.000120 ***
## Lightnessdeep:Hue           -2.048e-01  6.599e-02  -3.104 0.001994 ** 
## Lightnesslight:Hue          -1.332e-01  7.253e-02  -1.837 0.066663 .  
## Lightnessmidtone:Hue        -1.589e-01  6.363e-02  -2.497 0.012772 *  
## Lightnesspale:Hue           -1.625e-01  8.888e-02  -1.828 0.068054 .  
## Lightnesssaturated:Hue      -1.444e-01  6.403e-02  -2.255 0.024445 *  
## Lightnesssoft:Hue           -1.594e-01  6.563e-02  -2.429 0.015411 *  
## Lightnessdeep:I(R^2)        -9.327e-03  8.874e-03  -1.051 0.293647    
## Lightnesslight:I(R^2)       -7.219e-02  7.377e-02  -0.979 0.328193    
## Lightnessmidtone:I(R^2)      4.760e-03  1.779e-02   0.268 0.789132    
## Lightnesspale:I(R^2)         2.988e-01  2.231e-01   1.339 0.180897    
## Lightnesssaturated:I(R^2)    1.755e-03  9.818e-03   0.179 0.858179    
## Lightnesssoft:I(R^2)         1.044e-02  3.702e-02   0.282 0.778111    
## Lightnessdeep:I(G^2)         6.690e-02  2.053e-02   3.258 0.001180 ** 
## Lightnesslight:I(G^2)        3.432e-01  1.293e-01   2.654 0.008158 ** 
## Lightnessmidtone:I(G^2)      9.108e-02  4.472e-02   2.037 0.042087 *  
## Lightnesspale:I(G^2)         2.753e-01  3.361e-01   0.819 0.413091    
## Lightnesssaturated:I(G^2)    6.074e-02  2.885e-02   2.105 0.035667 *  
## Lightnesssoft:I(G^2)         2.548e-01  7.069e-02   3.605 0.000337 ***
## Lightnessdeep:I(B^2)         1.410e-01  3.725e-02   3.785 0.000168 ***
## Lightnesslight:I(B^2)        2.097e-01  6.587e-02   3.184 0.001523 ** 
## Lightnessmidtone:I(B^2)      1.675e-01  3.893e-02   4.304 1.94e-05 ***
## Lightnesspale:I(B^2)         2.031e-01  1.088e-01   1.867 0.062397 .  
## Lightnesssaturated:I(B^2)    1.667e-01  3.840e-02   4.341 1.65e-05 ***
## Lightnesssoft:I(B^2)         2.116e-01  4.521e-02   4.681 3.49e-06 ***
## Lightnessdeep:I(Hue^2)      -3.804e-03  1.196e-02  -0.318 0.750587    
## Lightnesslight:I(Hue^2)     -4.958e-03  1.680e-02  -0.295 0.768033    
## Lightnessmidtone:I(Hue^2)   -6.729e-03  1.573e-02  -0.428 0.668964    
## Lightnesspale:I(Hue^2)      -4.647e-03  1.597e-02  -0.291 0.771202    
## Lightnesssaturated:I(Hue^2) -1.369e-02  1.332e-02  -1.028 0.304457    
## Lightnesssoft:I(Hue^2)      -1.799e-02  1.718e-02  -1.047 0.295356    
## Saturationgray:R            -7.726e-03  1.336e-01  -0.058 0.953893    
## Saturationmuted:R            1.801e-03  2.805e-02   0.064 0.948832    
## Saturationneutral:R          6.425e-02  5.264e-02   1.221 0.222646    
## Saturationpure:R             3.889e-02  3.024e-02   1.286 0.198924    
## Saturationshaded:R          -8.517e-03  3.989e-02  -0.214 0.830987    
## Saturationsubdued:R          2.550e-02  3.297e-02   0.773 0.439627    
## Saturationgray:G            -1.178e-01  1.631e-01  -0.723 0.470079    
## Saturationmuted:G            6.606e-04  4.410e-02   0.015 0.988053    
## Saturationneutral:G         -6.066e-02  8.990e-02  -0.675 0.500036    
## Saturationpure:G             4.224e-02  3.922e-02   1.077 0.281977    
## Saturationshaded:G           3.322e-02  6.555e-02   0.507 0.612488    
## Saturationsubdued:G         -4.625e-04  5.330e-02  -0.009 0.993079    
## Saturationgray:B             1.423e-01  1.037e-01   1.372 0.170585    
## Saturationmuted:B            2.640e-02  3.160e-02   0.835 0.403833    
## Saturationneutral:B          1.006e-02  5.894e-02   0.171 0.864533    
## Saturationpure:B            -3.860e-02  2.836e-02  -1.361 0.174060    
## Saturationshaded:B          -2.382e-02  4.371e-02  -0.545 0.585942    
## Saturationsubdued:B          7.099e-04  3.715e-02   0.019 0.984760    
## Saturationgray:Hue           3.804e-05  3.719e-02   0.001 0.999184    
## Saturationmuted:Hue          2.625e-04  2.964e-02   0.009 0.992935    
## Saturationneutral:Hue        4.235e-02  3.060e-02   1.384 0.166833    
## Saturationpure:Hue           3.863e-02  3.700e-02   1.044 0.296781    
## Saturationshaded:Hue         2.487e-02  3.019e-02   0.824 0.410388    
## Saturationsubdued:Hue        1.873e-02  3.018e-02   0.621 0.535115    
## Saturationgray:I(R^2)        3.507e-01  4.509e-01   0.778 0.437004    
## Saturationmuted:I(R^2)       4.412e-02  1.405e-02   3.140 0.001768 ** 
## Saturationneutral:I(R^2)    -9.845e-02  1.477e-01  -0.667 0.505320    
## Saturationpure:I(R^2)        8.689e-03  1.047e-02   0.830 0.406918    
## Saturationshaded:I(R^2)     -9.684e-02  4.802e-02  -2.016 0.044166 *  
## Saturationsubdued:I(R^2)     4.059e-02  2.604e-02   1.559 0.119496    
## Saturationgray:I(G^2)        5.604e-02  6.991e-01   0.080 0.936140    
## Saturationmuted:I(G^2)       5.587e-02  3.474e-02   1.608 0.108240    
## Saturationneutral:I(G^2)    -4.113e-01  2.176e-01  -1.890 0.059155 .  
## Saturationpure:I(G^2)        1.592e-02  2.779e-02   0.573 0.566951    
## Saturationshaded:I(G^2)     -6.115e-02  9.074e-02  -0.674 0.500651    
## Saturationsubdued:I(G^2)    -1.946e-02  4.496e-02  -0.433 0.665365    
## Saturationgray:I(B^2)       -2.276e-01  2.326e-01  -0.979 0.328175    
## Saturationmuted:I(B^2)       5.149e-02  2.271e-02   2.267 0.023712 *  
## Saturationneutral:I(B^2)    -1.073e-01  7.824e-02  -1.371 0.170714    
## Saturationpure:I(B^2)       -1.393e-02  1.544e-02  -0.902 0.367315    
## Saturationshaded:I(B^2)      4.691e-02  4.523e-02   1.037 0.300067    
## Saturationsubdued:I(B^2)     8.180e-03  2.649e-02   0.309 0.757579    
## Saturationgray:I(Hue^2)     -1.191e-03  1.931e-02  -0.062 0.950860    
## Saturationmuted:I(Hue^2)    -2.269e-02  1.532e-02  -1.481 0.139129    
## Saturationneutral:I(Hue^2)   1.765e-02  1.625e-02   1.086 0.277735    
## Saturationpure:I(Hue^2)      1.881e-02  1.399e-02   1.344 0.179297    
## Saturationshaded:I(Hue^2)    1.850e-03  1.498e-02   0.124 0.901716    
## Saturationsubdued:I(Hue^2)  -1.052e-02  1.565e-02  -0.672 0.501866    
## Lightnessdeep:R:G            5.004e-03  1.886e-02   0.265 0.790829    
## Lightnesslight:R:G           2.533e-01  1.325e-01   1.912 0.056313 .  
## Lightnessmidtone:R:G         1.229e-01  4.718e-02   2.605 0.009400 ** 
## Lightnesspale:R:G            6.957e-01  3.645e-01   1.909 0.056737 .  
## Lightnesssaturated:R:G       7.351e-02  2.793e-02   2.632 0.008697 ** 
## Lightnesssoft:R:G            1.012e-01  7.566e-02   1.337 0.181688    
## Lightnessdeep:R:B           -1.165e-02  2.386e-02  -0.488 0.625555    
## Lightnesslight:R:B          -6.598e-02  1.026e-01  -0.643 0.520627    
## Lightnessmidtone:R:B         4.224e-03  3.560e-02   0.119 0.905599    
## Lightnesspale:R:B           -4.629e-01  2.799e-01  -1.654 0.098693 .  
## Lightnesssaturated:R:B       4.583e-02  2.789e-02   1.643 0.100854    
## Lightnesssoft:R:B            8.404e-03  5.500e-02   0.153 0.878610    
## Lightnessdeep:R:Hue         -3.360e-03  1.729e-02  -0.194 0.845981    
## Lightnesslight:R:Hue         2.510e-03  4.470e-02   0.056 0.955240    
## Lightnessmidtone:R:Hue      -6.270e-03  2.600e-02  -0.241 0.809552    
## Lightnesspale:R:Hue          1.245e-01  7.858e-02   1.585 0.113513    
## Lightnesssaturated:R:Hue    -1.019e-02  2.032e-02  -0.501 0.616402    
## Lightnesssoft:R:Hue         -1.562e-03  3.446e-02  -0.045 0.963867    
## Lightnessdeep:G:B            1.105e-01  3.088e-02   3.577 0.000373 ***
## Lightnesslight:G:B           2.217e-01  1.222e-01   1.815 0.070063 .  
## Lightnessmidtone:G:B         1.522e-01  4.758e-02   3.198 0.001450 ** 
## Lightnesspale:G:B            5.919e-01  3.194e-01   1.853 0.064293 .  
## Lightnesssaturated:G:B       1.181e-01  3.608e-02   3.272 0.001127 ** 
## Lightnesssoft:G:B            1.260e-01  7.096e-02   1.776 0.076212 .  
## Lightnessdeep:G:Hue         -8.602e-02  3.038e-02  -2.831 0.004787 ** 
## Lightnesslight:G:Hue        -3.120e-03  5.601e-02  -0.056 0.955598    
## Lightnessmidtone:G:Hue      -1.028e-02  4.171e-02  -0.247 0.805323    
## Lightnesspale:G:Hue         -4.457e-02  7.640e-02  -0.583 0.559832    
## Lightnesssaturated:G:Hue    -2.276e-02  3.461e-02  -0.658 0.511073    
## Lightnesssoft:G:Hue         -4.206e-02  4.633e-02  -0.908 0.364315    
## Lightnessdeep:B:Hue         -8.372e-02  2.929e-02  -2.858 0.004397 ** 
## Lightnesslight:B:Hue        -1.382e-01  4.860e-02  -2.844 0.004604 ** 
## Lightnessmidtone:B:Hue      -8.364e-02  3.525e-02  -2.373 0.017953 *  
## Lightnesspale:B:Hue         -1.631e-01  6.742e-02  -2.418 0.015863 *  
## Lightnesssaturated:B:Hue    -5.887e-02  3.048e-02  -1.931 0.053880 .  
## Lightnesssoft:B:Hue         -8.644e-02  4.071e-02  -2.123 0.034103 *  
## Saturationgray:R:G          -7.067e-01  1.068e+00  -0.662 0.508266    
## Saturationmuted:R:G         -1.153e-01  3.770e-02  -3.059 0.002316 ** 
## Saturationneutral:R:G        3.978e-01  3.232e-01   1.231 0.218819    
## Saturationpure:R:G           3.260e-06  3.163e-02   0.000 0.999918    
## Saturationshaded:R:G         1.796e-01  1.030e-01   1.743 0.081838 .  
## Saturationsubdued:R:G       -7.833e-02  5.576e-02  -1.405 0.160537    
## Saturationgray:R:B           2.204e-01  4.825e-01   0.457 0.647921    
## Saturationmuted:R:B          5.159e-02  3.079e-02   1.676 0.094308 .  
## Saturationneutral:R:B       -2.422e-01  1.320e-01  -1.836 0.066895 .  
## Saturationpure:R:B          -1.737e-02  2.142e-02  -0.811 0.417748    
## Saturationshaded:R:B        -4.993e-02  7.533e-02  -0.663 0.507664    
## Saturationsubdued:R:B        1.566e-02  3.860e-02   0.406 0.684978    
## Saturationgray:R:Hue         5.358e-02  1.348e-01   0.397 0.691182    
## Saturationmuted:R:Hue       -8.457e-03  3.214e-02  -0.263 0.792529    
## Saturationneutral:R:Hue      3.524e-02  7.918e-02   0.445 0.656472    
## Saturationpure:R:Hue        -2.235e-02  3.801e-02  -0.588 0.556769    
## Saturationshaded:R:Hue      -4.133e-02  4.705e-02  -0.878 0.380105    
## Saturationsubdued:R:Hue     -2.503e-02  3.787e-02  -0.661 0.508859    
## Saturationgray:G:B           3.230e-01  6.893e-01   0.469 0.639517    
## Saturationmuted:G:B         -7.810e-02  5.265e-02  -1.483 0.138482    
## Saturationneutral:G:B        4.554e-01  2.291e-01   1.987 0.047300 *  
## Saturationpure:G:B           1.353e-02  3.454e-02   0.392 0.695318    
## Saturationshaded:G:B        -3.276e-02  1.269e-01  -0.258 0.796315    
## Saturationsubdued:G:B        4.067e-02  6.306e-02   0.645 0.519252    
## Saturationgray:G:Hue        -1.215e-01  1.564e-01  -0.777 0.437700    
## Saturationmuted:G:Hue       -1.850e-02  2.909e-02  -0.636 0.525012    
## Saturationneutral:G:Hue     -1.792e-02  7.618e-02  -0.235 0.814089    
## Saturationpure:G:Hue         9.135e-03  2.725e-02   0.335 0.737552    
## Saturationshaded:G:Hue       7.314e-02  4.226e-02   1.731 0.083967 .  
## Saturationsubdued:G:Hue     -3.253e-02  3.569e-02  -0.911 0.362429    
## Saturationgray:B:Hue         7.069e-02  9.546e-02   0.741 0.459213    
## Saturationmuted:B:Hue        1.980e-02  3.904e-02   0.507 0.612201    
## Saturationneutral:B:Hue     -4.266e-02  5.169e-02  -0.825 0.409520    
## Saturationpure:B:Hue        -7.908e-03  3.794e-02  -0.208 0.834956    
## Saturationshaded:B:Hue      -5.420e-02  4.629e-02  -1.171 0.242119    
## Saturationsubdued:B:Hue      2.367e-02  4.151e-02   0.570 0.568665    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.03175 on 640 degrees of freedom
## Multiple R-squared:  0.9992, Adjusted R-squared:  0.999 
## F-statistic:  4260 on 194 and 640 DF,  p-value: < 2.2e-16
re_load_mod05 %>% summary()
## 
## Call:
## lm(formula = y ~ (Lightness + Saturation) * (R + G + B + Hue), 
##     data = df_standard)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.218807 -0.029587 -0.002964  0.023051  0.211793 
## 
## Coefficients:
##                          Estimate Std. Error t value Pr(>|t|)    
## (Intercept)             0.0156508  0.0333284   0.470 0.638778    
## Lightnessdeep          -0.0902948  0.0344015  -2.625 0.008844 ** 
## Lightnesslight         -0.3303232  0.0419341  -7.877 1.14e-14 ***
## Lightnessmidtone       -0.0813395  0.0341341  -2.383 0.017417 *  
## Lightnesspale          -0.6291915  0.0528630 -11.902  < 2e-16 ***
## Lightnesssaturated     -0.0625005  0.0334789  -1.867 0.062300 .  
## Lightnesssoft          -0.1545354  0.0356907  -4.330 1.69e-05 ***
## Saturationgray         -0.0440442  0.0115286  -3.820 0.000144 ***
## Saturationmuted        -0.0291692  0.0063463  -4.596 5.03e-06 ***
## Saturationneutral      -0.0618573  0.0067847  -9.117  < 2e-16 ***
## Saturationpure          0.0518827  0.0071504   7.256 9.75e-13 ***
## Saturationshaded       -0.0531590  0.0064988  -8.180 1.17e-15 ***
## Saturationsubdued      -0.0478015  0.0064215  -7.444 2.62e-13 ***
## R                       0.1892266  0.0088042  21.493  < 2e-16 ***
## G                       0.6312372  0.0116501  54.183  < 2e-16 ***
## B                       0.1602681  0.0173716   9.226  < 2e-16 ***
## Hue                    -0.0573637  0.0094517  -6.069 2.02e-09 ***
## Lightnessdeep:R         0.0018104  0.0094027   0.193 0.847370    
## Lightnesslight:R        0.1036261  0.0244211   4.243 2.47e-05 ***
## Lightnessmidtone:R      0.0151836  0.0133692   1.136 0.256430    
## Lightnesspale:R         0.1812149  0.0433573   4.180 3.26e-05 ***
## Lightnesssaturated:R   -0.0007776  0.0106664  -0.073 0.941904    
## Lightnesssoft:R         0.0492488  0.0186494   2.641 0.008439 ** 
## Lightnessdeep:G        -0.0580356  0.0116771  -4.970 8.25e-07 ***
## Lightnesslight:G        0.3380262  0.0295808  11.427  < 2e-16 ***
## Lightnessmidtone:G      0.0665761  0.0164872   4.038 5.93e-05 ***
## Lightnesspale:G         0.5858475  0.0457749  12.798  < 2e-16 ***
## Lightnesssaturated:G   -0.0118009  0.0126143  -0.936 0.349815    
## Lightnesssoft:G         0.2086321  0.0225868   9.237  < 2e-16 ***
## Lightnessdeep:B        -0.0494322  0.0201075  -2.458 0.014175 *  
## Lightnesslight:B       -0.0043713  0.0248889  -0.176 0.860630    
## Lightnessmidtone:B     -0.0644335  0.0201605  -3.196 0.001450 ** 
## Lightnesspale:B         0.0233714  0.0316768   0.738 0.460855    
## Lightnesssaturated:B   -0.0469872  0.0202082  -2.325 0.020323 *  
## Lightnesssoft:B        -0.0545076  0.0210505  -2.589 0.009797 ** 
## Lightnessdeep:Hue       0.0392576  0.0101835   3.855 0.000125 ***
## Lightnesslight:Hue      0.0525404  0.0098568   5.330 1.29e-07 ***
## Lightnessmidtone:Hue    0.0473387  0.0102896   4.601 4.92e-06 ***
## Lightnesspale:Hue       0.0544998  0.0103466   5.267 1.80e-07 ***
## Lightnesssaturated:Hue  0.0375392  0.0099384   3.777 0.000171 ***
## Lightnesssoft:Hue       0.0534146  0.0103915   5.140 3.48e-07 ***
## Saturationgray:R       -0.1580780  0.0675196  -2.341 0.019475 *  
## Saturationmuted:R      -0.0187794  0.0114883  -1.635 0.102531    
## Saturationneutral:R     0.0608227  0.0285038   2.134 0.033171 *  
## Saturationpure:R       -0.0011462  0.0086715  -0.132 0.894881    
## Saturationshaded:R      0.0127518  0.0200588   0.636 0.525149    
## Saturationsubdued:R     0.0060838  0.0152335   0.399 0.689732    
## Saturationgray:G        0.2688204  0.0762993   3.523 0.000451 ***
## Saturationmuted:G       0.0382669  0.0135997   2.814 0.005021 ** 
## Saturationneutral:G    -0.0058799  0.0331360  -0.177 0.859204    
## Saturationpure:G        0.0340289  0.0110360   3.083 0.002119 ** 
## Saturationshaded:G      0.0410349  0.0237407   1.728 0.084307 .  
## Saturationsubdued:G     0.0330834  0.0176914   1.870 0.061859 .  
## Saturationgray:B       -0.0628830  0.0409159  -1.537 0.124733    
## Saturationmuted:B       0.0008769  0.0100322   0.087 0.930373    
## Saturationneutral:B     0.0171906  0.0194636   0.883 0.377396    
## Saturationpure:B       -0.0400486  0.0086671  -4.621 4.48e-06 ***
## Saturationshaded:B      0.0014420  0.0148811   0.097 0.922827    
## Saturationsubdued:B     0.0057016  0.0118594   0.481 0.630819    
## Saturationgray:Hue      0.0016934  0.0121588   0.139 0.889268    
## Saturationmuted:Hue    -0.0075430  0.0087080  -0.866 0.386646    
## Saturationneutral:Hue   0.0313044  0.0097180   3.221 0.001330 ** 
## Saturationpure:Hue      0.0137725  0.0085005   1.620 0.105598    
## Saturationshaded:Hue    0.0020142  0.0091012   0.221 0.824907    
## Saturationsubdued:Hue   0.0027768  0.0094296   0.294 0.768477    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.04872 on 770 degrees of freedom
## Multiple R-squared:  0.9978, Adjusted R-squared:  0.9976 
## F-statistic:  5477 on 64 and 770 DF,  p-value: < 2.2e-16

Part ii: Regression– iiB) Bayesian Linear models

You have explored the relationships; next you must consider the UNCERTAINTY on the residual error through Bayesian modeling techniques! #1.Fit 2 Bayesian linear models – one must be the best model from iiA) : model 9 the second must be another model you fit in iiA): model 8 • State why you chose the second model. • You may use the Laplace Approximation approach we used in lecture and the homework assignments. • Alternatively, you may use rstanarm’s stan_lm() or stan_glm() function to fit full Bayesian linear models with syntax like R’s lm(). • Resources to help with rstanarm if you’re interested: • How to Use the rstanarm Package (r-project.org) • Estimating Regularized Linear Models with rstanarm (r-project.org) • Extra examples also provided on Canvas.

• After fitting the 2 models, you must identify the best model. • Which performance metric did you use to make your selection? • Visualize the regression coefficient posterior summary statistics for your best model. • For your best model: Study the posterior UNCERTAINTY on the likelihood noise (residual error), 𝜎. • How does the lm() maximum likelihood estimate (MLE) on 𝜎 relate to the posterior UNCERTAINTY on 𝜎? • Do you feel the posterior is precise or are we quite uncertain about 𝜎?

# Build the design matrices for the two Bayesian fits by reusing the saved
# lm() model formulas.
# NOTE(review): the matrices are built from dfii while the info lists below
# take yobs from df_standard — confirm dfii and df_standard contain the same
# (standardized) rows in the same order, otherwise the likelihood pairs
# responses with the wrong design rows.
Xmat_08 <- model.matrix(re_load_mod08, dfii)
Xmat_09 <- model.matrix(re_load_mod09, dfii)

Create the information lists with the priors. The info_09 list holds the information for model 9, while info_08 holds the information for model 8. Specify the shared prior mean, mu_beta, as 0, and the shared prior standard deviation, tau_beta, as 2. The prior rate parameter on the noise, sigma_rate, is assigned the value 1.

# Data and prior information for the Bayesian fit of model 8:
#   yobs          - observed responses
#   design_matrix - model-8 basis (from model.matrix above)
#   mu_beta / tau_beta - shared Gaussian prior mean / sd on every coefficient
#   sigma_rate    - rate of the Exponential prior on the noise sd sigma
info_08 <- list(
  yobs = df_standard$y,
  design_matrix = Xmat_08,
  mu_beta = 0,
  tau_beta = 2,
  sigma_rate = 1
)

# Same prior setup for model 9; only the design matrix differs.
info_09 <- list(
  yobs = df_standard$y,
  design_matrix = Xmat_09,
  mu_beta = 0,
  tau_beta = 2,
  sigma_rate = 1
)

Define the log-posterior function lm_logpost(). We use the log-transformation on σ, so the log-posterior is actually defined in terms of the regression coefficients, β, and the unbounded noise parameter, φ = log[σ].

# Un-normalized log-posterior for the Bayesian linear model, parameterized by
# the regression coefficients beta and the unbounded noise parameter
# varphi = log(sigma). `unknowns` stacks c(beta, varphi); `my_info` supplies
# the data (yobs, design_matrix) and the prior hyperparameters
# (mu_beta, tau_beta, sigma_rate).
lm_logpost <- function(unknowns, my_info)
{
  # Split the unknown vector: all but the last entry are the coefficients,
  # the final entry is varphi.
  num_beta <- length(unknowns) - 1
  beta_v <- as.vector(unknowns[1:num_beta])
  lik_varphi <- unknowns[length(unknowns)]

  # Back-transform to the likelihood noise standard deviation.
  lik_sigma <- exp(lik_varphi)

  # Linear predictor for every observation.
  mu <- my_info$design_matrix %*% beta_v

  # Gaussian log-likelihood of the observed responses.
  log_lik <- sum(dnorm(x = my_info$yobs, mean = mu, sd = lik_sigma, log = TRUE))

  # Independent Gaussian prior on each coefficient plus an Exponential
  # prior on sigma.
  log_prior <- sum(dnorm(x = beta_v,
                         mean = my_info$mu_beta,
                         sd = my_info$tau_beta,
                         log = TRUE)) +
    dexp(x = lik_sigma, rate = my_info$sigma_rate, log = TRUE)

  # Jacobian of the sigma -> varphi = log(sigma) change of variables:
  # log|d sigma / d varphi| = varphi.
  log_lik + log_prior + lik_varphi
}

The my_laplace() function is below

my_laplace <- function(start_guess, logpost_func, ...)
{
  # Laplace approximation: maximize the log-posterior with BFGS, then
  # approximate the posterior as a multivariate Gaussian centered at the
  # mode with covariance equal to the negative inverse Hessian.
  #
  # start_guess  : initial values for all unknown parameters
  # logpost_func : function(unknowns, ...) returning the log-posterior
  # ...          : extra arguments forwarded to logpost_func (e.g. the info list)
  fit <- optim(start_guess,
               logpost_func,
               gr = NULL,
               ...,
               method = "BFGS",
               hessian = TRUE,
               control = list(fnscale = -1, maxit = 1001))
  
  mode <- fit$par
  post_var_matrix <- -solve(fit$hessian)
  p <- length(mode) # number of unknown parameters
  
  # BUG FIX: det() underflows to 0 when p is large and the posterior
  # variances are small, which made log(det(...)) return -Inf (as seen for
  # model 08). Compute the log-determinant directly instead.
  log_det <- as.numeric(determinant(post_var_matrix, logarithm = TRUE)$modulus)
  
  # log of the Laplace approximation to the marginal likelihood (evidence)
  int <- p/2 * log(2*pi) + 0.5*log_det + logpost_func(mode, ...)
  
  # package all of the results into a list
  list(mode = mode,
       var_matrix = post_var_matrix,
       log_evidence = int,
       converge = if (fit$convergence == 0) "YES" else "NO",
       iter_counts = as.numeric(fit$counts[1]))
}

Fit the Bayesian linear model 08.

# Fit model 08: start every coefficient, plus the log-noise parameter
# varphi, at 0
init_guess <- rep(0, ncol(Xmat_08)+1)
laplace_quad_08 <- my_laplace(init_guess, lm_logpost, info_08)

laplace_quad_08$converge
## [1] "YES"
laplace_quad_08$log_evidence
## [1] -Inf

Display the posterior mode and posterior standard deviations

cat("posterior mode: ", laplace_quad_08$mode, "\n\n")
## posterior mode:  -0.5513629 0.4137779 0.5438713 0.4166352 0.9896795 0.4397414 0.4431632 -0.01954001 -0.01091312 -0.002595044 -0.05529535 -0.0005549269 -0.01760662 0.166577 0.4209329 -0.3788962 0.1283146 0.04685251 0.02766046 -0.1179428 0.001884472 -0.08980574 -0.02363128 0.0282098 -0.1501897 0.03664078 0.07847458 -0.01062842 0.1386229 0.04779992 -0.4103492 0.06526968 0.05186577 0.2204176 -0.2522191 0.2207841 -0.8000247 0.223685 0.06949775 0.481451 0.4586167 0.5103998 0.457466 0.5398372 0.4990376 -0.2002474 -0.1293847 -0.1541937 -0.1591516 -0.1400747 -0.1550027 -0.009295456 -0.07118169 0.005019286 0.2996579 0.001819671 0.01064443 0.06702035 0.3412107 0.09227306 0.2703786 0.06181998 0.2545549 0.1355801 0.2047849 0.1618887 0.2009575 0.161222 0.2058263 -0.003823479 -0.004469975 -0.006330811 -0.004175158 -0.01386251 -0.01743817 -0.01915716 0.002064728 0.06427014 0.03872306 -0.008951481 0.02518748 -0.09967199 0.0004209751 -0.0617041 0.04179359 0.03413876 -0.0001129228 0.133334 0.02621953 0.011243 -0.0380661 -0.02441206 0.0004063613 -0.0001531142 0.0006291646 0.04206827 0.03795517 0.02495524 0.0187025 0.2865317 0.04393491 -0.08814013 0.008547119 -0.09753258 0.04014154 -0.02129725 0.05561047 -0.3963846 0.01603608 -0.06002794 -0.01903479 -0.228901 0.05114366 -0.1041692 -0.01339706 0.04719936 0.007861356 -0.0006670487 -0.02244184 0.01796383 0.01835175 0.002328396 -0.0100891 0.004484463 0.2524113 0.1213733 0.6786898 0.07215723 0.1005221 -0.01312053 -0.06498183 0.003397747 -0.4410236 0.04432404 0.007837539 -0.003116209 0.001980745 -0.006511085 0.122212 -0.01023658 -0.002219608 0.1076572 0.2152946 0.1481963 0.573516 0.1141759 0.1218802 -0.08552693 -0.001482859 -0.01005069 -0.0441347 -0.02260677 -0.04094213 -0.08129615 -0.1350991 -0.08065662 -0.1572569 -0.05638528 -0.08324537 -0.5585624 -0.1146378 0.375611 0.0002207609 0.1805006 -0.07772967 0.1749691 0.05110805 -0.2373467 -0.01693302 -0.05041805 0.01552658 0.0397311 -0.00873217 0.03991291 -0.02223833 -0.04172893 -0.02496097 
0.3599958 -0.07757206 0.4439821 0.01259088 -0.03439237 0.04017654 -0.1032538 -0.01785263 -0.02215065 0.00944382 0.07389894 -0.03230595 0.06372182 0.01917394 -0.04205479 -0.00761598 -0.05475638 0.02338721 -3.582063
cat("posterior standard deviations: ", sqrt(diag(laplace_quad_08$var_matrix)))
## posterior standard deviations:  0.1055191 0.1013263 0.1138046 0.106519 0.1520288 0.1035246 0.1070151 0.0282161 0.02708923 0.02677497 0.03914197 0.02698191 0.02703662 0.04224551 0.04829358 0.1117206 0.05619587 0.007982489 0.02507312 0.03349619 0.01575294 0.02680348 0.02335575 0.02766681 0.03720479 0.02971207 0.03481135 0.03762075 0.1193895 0.04136778 0.3095488 0.03806171 0.05417495 0.04375112 0.144329 0.05144059 0.3861441 0.04491607 0.07003661 0.108209 0.1391186 0.1127831 0.2039894 0.1081036 0.1162216 0.05752893 0.06318651 0.05536051 0.07693626 0.05575184 0.05715865 0.007770032 0.06451125 0.01557529 0.1927278 0.008596145 0.03240892 0.01795947 0.1125866 0.03909418 0.2850895 0.0252202 0.06164596 0.03218241 0.05733104 0.03362629 0.093839 0.03321139 0.03915902 0.0104514 0.01466904 0.01372663 0.01391657 0.01163239 0.01500194 0.1131105 0.02455016 0.0460158 0.02646017 0.0348833 0.02885199 0.1332353 0.03858012 0.07850195 0.03430838 0.05728722 0.04660154 0.08544275 0.0276601 0.05140531 0.02480396 0.03822344 0.03249074 0.03223249 0.0259167 0.02671771 0.03235385 0.02639257 0.02639288 0.3444908 0.01230008 0.1272758 0.009164727 0.04199971 0.02279117 0.5127114 0.03037423 0.1867102 0.02430245 0.07916347 0.03931825 0.1920252 0.0198713 0.0677061 0.01349802 0.03950466 0.02317321 0.01660027 0.01341008 0.0141711 0.01223486 0.01307669 0.01368611 0.01649349 0.1155325 0.04126031 0.3112995 0.02441337 0.06608493 0.02081569 0.08964531 0.03112128 0.2392698 0.02435778 0.04809415 0.01513483 0.03907455 0.02276158 0.06818192 0.01779082 0.03013038 0.02687435 0.106475 0.04148795 0.2733178 0.03138539 0.0619568 0.02656696 0.04882158 0.03646567 0.06611806 0.03025403 0.04038842 0.02554426 0.04239234 0.03074304 0.05844103 0.026586 0.03551517 0.7871181 0.03296569 0.277536 0.027661 0.08997499 0.04876403 0.3685498 0.0269305 0.1137736 0.01872412 0.06574094 0.0337582 0.1113503 0.02812207 0.06868927 0.03324323 0.04115023 0.03313617 0.5320436 0.04604977 0.1975533 0.03018513 0.1107034 0.0551565 0.1247469 
0.0254364 0.06583479 0.02384029 0.03693237 0.03121329 0.078693 0.03411972 0.04497596 0.03318185 0.04042994 0.03628108 0.024487

Fit the Bayesian linear model 09

# Fit model 09 with the same zero-valued initial guess strategy
init_guess <- rep(0, ncol(Xmat_09)+1)
laplace_quad_09 <- my_laplace(init_guess, lm_logpost, info_09)

laplace_quad_09$converge
## [1] "YES"
laplace_quad_09$log_evidence
## [1] 1218.426

Display the posterior mode and posterior standard deviations

cat("posterior mode: ", laplace_quad_09$mode, "\n\n")
## posterior mode:  -0.1657045 0.03532588 0.005869348 0.04562268 0.05738303 0.05023855 0.01902506 0.005425965 0.008165973 -0.00189388 0.02175495 0.003596849 0.0007556507 0.2396139 0.6454933 0.1285493 0.01294464 0.04906861 0.09574578 0.03968679 -0.01621884 0.01773994 -0.005136824 -0.01238986 0.007694969 0.02452262 -0.03187443 0.09941477 -0.04764033 0.01165765 0.03131431 -0.005566811 -3.124398
cat("posterior standard deviations: ", sqrt(diag(laplace_quad_09$var_matrix)))
## posterior standard deviations:  0.0171563 0.008240401 0.01884723 0.01582103 0.01961775 0.01232755 0.01765138 0.008073312 0.005831563 0.006728248 0.006187497 0.006421094 0.006156087 0.007259569 0.01044355 0.009565383 0.005668725 0.004200285 0.00772694 0.0058205 0.003243595 0.008423175 0.007202855 0.009852411 0.008447183 0.007011812 0.009478973 0.004242994 0.006585712 0.008049399 0.005039858 0.003228666 0.02448401

Compare the log-evidence values of the two models (log-evidence is the basis for the posterior model weights).

cat("log_evidence model 09: ", laplace_quad_09$log_evidence, "\n\n")
## log_evidence model 09:  1218.426
cat("log_evidence model 08: ", laplace_quad_08$log_evidence, "\n\n")
## log_evidence model 08:  -Inf

Model 09 is the best model under the Laplace Approximation approach, since it has the larger log-evidence (model 08's evaluated to -Inf). For reference, the lm() maximum likelihood estimate of the residual standard error for model 09 is 0.04481.

# The last element of the posterior mode is varphi = log(sigma);
# back-transform with exp() to report the posterior mode of sigma
varphi_09 <- laplace_quad_09$mode[length(laplace_quad_09$mode)]
cat("posterior UNCERTAINTY  model 09: ", exp(varphi_09), "\n")
## posterior UNCERTAINTY  model 09:  0.04396338

The posterior standard deviation of the unbounded noise parameter, φ = log(σ), is 0.02448401.

Since the lm() MLE (0.04481) and the posterior mode of σ (0.04396) are close, but the posterior standard deviation of φ = log(σ) (0.0245) is fairly large relative to that mode, we remain somewhat uncertain about σ.

##Part ii: Regression – iiC) Linear models Predictions

# Prediction grid: sweep G as the primary input and Hue as the facet
# variable, holding R and B at 0 and the categorical inputs at single
# reference levels ("dark" Lightness, "gray" Saturation).
viz_grid <- expand.grid(R = 0,
                        G = seq(-3, 3, length.out = 75),
                        B = 0,
                        Hue = seq(-2.5, 2.5, length.out = 6),
                        Lightness = "dark",
                        Saturation = "gray",
                        KEEP.OUT.ATTRS = FALSE,
                        stringsAsFactors = FALSE) %>%
  as.data.frame() %>%
  tibble::as_tibble()

viz_grid %>% glimpse()
## Rows: 450
## Columns: 6
## $ R          <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,…
## $ G          <dbl> -3.000000, -2.918919, -2.837838, -2.756757, -2.675676, -2.5…
## $ B          <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,…
## $ Hue        <dbl> -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5,…
## $ Lightness  <chr> "dark", "dark", "dark", "dark", "dark", "dark", "dark", "da…
## $ Saturation <chr> "gray", "gray", "gray", "gray", "gray", "gray", "gray", "gr…
tidy_predict <- function(mod, xnew)
{
  # Make predictions with a fitted lm() object and return the new data
  # augmented with the mean trend, confidence interval, and prediction
  # interval as tidy columns.
  #
  # mod  : a fitted lm() model
  # xnew : data frame of inputs to predict at
  
  # confidence interval on the mean trend
  conf_part <- predict(mod, xnew, interval = "confidence") %>% 
    as.data.frame() %>% tibble::as_tibble() %>% 
    dplyr::select(pred = fit, ci_lwr = lwr, ci_upr = upr)
  
  # prediction interval on individual observations
  pred_part <- predict(mod, xnew, interval = 'prediction') %>% 
    as.data.frame() %>% tibble::as_tibble() %>% 
    dplyr::select(pred_lwr = lwr, pred_upr = upr)
  
  xnew %>% bind_cols(conf_part) %>% bind_cols(pred_part)
}

#Use the non-Bayesian models for the predictions. Make predictions with each of the selected models.

# Predictions from (non-Bayesian) model 09 over the visualization grid
pred_lm_09 <- tidy_predict(re_load_mod09, viz_grid)

pred_lm_09 %>% glimpse()
## Rows: 450
## Columns: 11
## $ R          <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,…
## $ G          <dbl> -3.000000, -2.918919, -2.837838, -2.756757, -2.675676, -2.5…
## $ B          <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,…
## $ Hue        <dbl> -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5, -2.5,…
## $ Lightness  <chr> "dark", "dark", "dark", "dark", "dark", "dark", "dark", "da…
## $ Saturation <chr> "gray", "gray", "gray", "gray", "gray", "gray", "gray", "gr…
## $ pred       <dbl> -1.1848697, -1.1834536, -1.1807786, -1.1768445, -1.1716515,…
## $ ci_lwr     <dbl> -1.3401496, -1.3317272, -1.3222454, -1.3117049, -1.3001065,…
## $ ci_upr     <dbl> -1.0295898, -1.0351801, -1.0393118, -1.0419842, -1.0431966,…
## $ pred_lwr   <dbl> -1.3633269, -1.3558489, -1.3473557, -1.3378489, -1.3273300,…
## $ pred_upr   <dbl> -1.0064126, -1.0110583, -1.0142014, -1.0158402, -1.0159731,…

Visualization of each model’s predictions:

# Model 09: prediction interval (blue ribbon), confidence interval (grey
# ribbon), and mean trend (line), faceted by Hue
pred_lm_09 %>% ggplot(aes(x = G)) + geom_ribbon(aes(ymin = pred_lwr, ymax = pred_upr), fill = 'blue') + geom_ribbon(aes(ymin = ci_lwr, ymax = ci_upr), fill = 'grey') +
  geom_line(aes(y = pred)) + facet_wrap(~Hue) + 
  labs(x = "pred_lm_09$x1")

# Predictions from (non-Bayesian) model 08 over the same grid
pred_lm_08 <- tidy_predict(re_load_mod08, viz_grid)

To compare the visualizations across models, include a coord_cartesian() layer with the ylim argument set to c(-2, 4).

# Model 08: same visual encoding as the model 09 plot; the fixed y-axis
# limits make the interval widths directly comparable across models
pred_lm_08 %>% ggplot(aes(x = G)) + geom_ribbon(aes(ymin = pred_lwr, ymax = pred_upr), fill = 'blue') + geom_ribbon(aes(ymin = ci_lwr, ymax = ci_upr), fill = 'grey') +
  geom_line(aes(y = pred)) + facet_wrap(~Hue) + coord_cartesian(ylim = c(-2,4)) +
  labs(x = "pred_lm_08$x1")

When G < 2, the predictive trends are similar between the two selected linear models. When G > 2, the predictive trends look different: the uncertainty of model 8 is large, suggesting that model 8 is over-fitting.

##Part ii: Regression – iiD) Train/tune with resampling • Linear models: • All categorical and continuous inputs - linear additive features • Add categorical inputs to all main effect and all pairwise interactions of continuous inputs • The 2 models selected from iiA) (if they are not one of the two above) • Regularized regression with Elastic net • Add categorical inputs to all main effect and all pairwise interactions of continuous inputs • The more complex of the 2 models selected from iiA) • Neural network • Random forest • Gradient boosted tree • 2 methods of your choice that we did not explicitly discuss in lecture You must use ALL categorical and continuous inputs with the non-linear methods

#specify the resampling scheme and primary performance metric

# Resampling scheme: 10-fold cross-validation repeated 3 times
my_ctrl <- trainControl( method = "repeatedcv", number = 10, repeats = 3)

# primary performance metric for model comparison
my_metric <- 'RMSE'

You must train, assess, tune, and compare more complex methods

# Linear model 1: all categorical and continuous inputs, additive only
train_lm_01 <- train(y ~ ., 
                   data = dfii,
                   method = "lm",
                   metric = my_metric,
                   preProcess = c("center", "scale"),
                   trControl = my_ctrl)
# Linear model 2: categorical main effects plus all pairwise interactions
# of the continuous inputs
train_lm_02 <- train(y ~ Lightness + Saturation + (R + G + B + Hue)^2, 
                   data = dfii,
                   method = "lm",
                   metric = my_metric,
                   preProcess = c("center", "scale"),
                   trControl = my_ctrl)
# Model 9 from iiA): full continuous interactions plus quadratic terms
train_lm_09 <- train(y ~ Lightness + Saturation + R*G*B*Hue + I(R^2) + I(G^2) + I(B^2) + I(Hue^2), 
                   data = dfii,
                   method = "lm",
                   metric = my_metric,
                   preProcess = c("center", "scale"),
                   trControl = my_ctrl)
train_lm_09$results
##   intercept     RMSE  Rsquared        MAE      RMSESD   RsquaredSD       MAESD
## 1      TRUE 0.054563 0.9979538 0.04092225 0.006605376 0.0004091835 0.003634049
train_lm_09 %>% coefplot()

# Model 8 from iiA): categorical inputs interact with all pairwise
# continuous interactions and quadratic terms (the most complex model)
train_lm_08 <- train(y ~ (Lightness + Saturation) * (( R + G + B + Hue)^2 + I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), 
                   data = dfii,
                   method = "lm",
                   metric = my_metric,
                   preProcess = c("center", "scale"),
                   trControl = my_ctrl)
train_lm_08$results
##   intercept       RMSE  Rsquared        MAE      RMSESD   RsquaredSD
## 1      TRUE 0.04699707 0.9984425 0.03373372 0.007872184 0.0004992378
##         MAESD
## 1 0.003943451

#Regularized regression with Elastic net Train, assess, and tune the glmnet elastic net model with the defined resampling scheme. Assign the result to the enet_default object and display the result to the screen.

# Fixed seed so resampling folds are reproducible across model fits
set.seed(1234)

# Elastic net with caret's default tuning grid for alpha and lambda
enet_default_01 <- caret::train(y ~ Lightness + Saturation + (R + G + B + Hue)^2, 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl)

Create a custom tuning grid to further tune the elastic net lambda and alpha tuning parameters.

# Log-spaced lambda values spanning the range explored by the default fit
lambda_seq <- exp(seq(log(min(enet_default_01$results$lambda)),
                          log(max(enet_default_01$results$lambda)),
                          length.out = 25))

# Custom tuning grid: small alpha values (close to ridge) crossed with lambda
enet_grid_01 <- expand.grid(alpha = seq(0.0, 0.15, by = .01), lambda = lambda_seq)

Train, assess, and tune the elastic net model with the custom tuning grid and assign the result to the enet_tune_01 object.

# Same seed so the refit uses identical resampling folds
set.seed(1234)

# Refit the elastic net over the custom (alpha, lambda) grid
enet_tune_01 <- caret::train(y ~ Lightness + Saturation + (R + G + B + Hue)^2, 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl,
                             tuneGrid = enet_grid_01)
enet_tune_01$bestTune
##    alpha      lambda
## 78  0.03 0.003412554

model 09

# Reproducible folds for the model 09 elastic net
set.seed(1234)

# Elastic net on the model 09 feature set with caret's default tuning grid
enet_default_09 <- caret::train(y ~ Lightness + Saturation + R * G * B * Hue + I(R^2) + I(G^2) + I(B^2) + I(Hue^2), 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl)

enet_default_09$bestTune
##   alpha      lambda
## 1   0.1 0.002331673
# Refine lambda on a log scale around the default fit's explored range
lambda_seq <- exp(seq(log(min(enet_default_09$results$lambda)),
                          log(max(enet_default_09$results$lambda)),
                          length.out = 50))

# Narrow alpha range around the default best tune (alpha = 0.1)
enet_grid_09 <- expand.grid(alpha = seq(0.03, 0.12, by = .01), lambda = lambda_seq)

# Same seed so tuning uses identical resampling folds
set.seed(1234)

# Tune the model 09 elastic net over the custom grid
enet_tune_09 <- caret::train(y ~ Lightness + Saturation + R * G * B * Hue + I(R^2) + I(G^2) + I(B^2) + I(Hue^2), 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl,
                             tuneGrid = enet_grid_09)
enet_tune_09$bestTune
##    alpha      lambda
## 53  0.04 0.002813845

model 08

# Reproducible folds for the model 08 elastic net
set.seed(1234)

# Elastic net on the more complex model 08 feature set, default tuning grid
enet_default_08 <- caret::train(y ~ (Lightness + Saturation) * (( R + G + B + Hue)^2 + I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl)

enet_default_08$bestTune
##   alpha      lambda
## 1   0.1 0.002331673
# Refine lambda on a log scale around the default fit's explored range
lambda_seq <- exp(seq(log(min(enet_default_08$results$lambda)),
                          log(max(enet_default_08$results$lambda)),
                          length.out = 50))

# Narrow alpha range around the default best tune (alpha = 0.1)
enet_grid_08 <- expand.grid(alpha = seq(0.03, 0.12, by = .01), lambda = lambda_seq)

# Same seed so tuning uses identical resampling folds
set.seed(1234)

# Tune the model 08 elastic net over the custom grid
enet_tune_08 <- caret::train(y ~ (Lightness + Saturation) * (( R + G + B + Hue)^2 + I(R^2) + I(G^2) + I(B^2) + I(Hue^2)), 
                             data = dfii,
                             method = 'glmnet',
                             metric = my_metric,
                             preProcess = c('center', 'scale'),
                             trControl = my_ctrl,
                             tuneGrid = enet_grid_08)

enet_tune_08$bestTune
##    alpha      lambda
## 53  0.04 0.002813845
enet_tune_08$results
##     alpha      lambda       RMSE  Rsquared        MAE      RMSESD   RsquaredSD
## 1    0.03 0.002331673 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 2    0.03 0.002561438 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 3    0.03 0.002813845 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 4    0.03 0.003091125 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 5    0.03 0.003395728 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 6    0.03 0.003730347 0.06469133 0.9970976 0.04831645 0.008261880 0.0007151921
## 7    0.03 0.004097939 0.06475364 0.9970925 0.04837121 0.008239364 0.0007131218
## 8    0.03 0.004501755 0.06488398 0.9970815 0.04848669 0.008202778 0.0007098957
## 9    0.03 0.004945363 0.06505613 0.9970668 0.04864269 0.008154320 0.0007057924
## 10   0.03 0.005432685 0.06526198 0.9970494 0.04883199 0.008104553 0.0007009719
## 11   0.03 0.005968028 0.06550002 0.9970292 0.04904412 0.008054690 0.0006955343
## 12   0.03 0.006556124 0.06576161 0.9970069 0.04926500 0.008004048 0.0006898887
## 13   0.03 0.007202172 0.06604178 0.9969829 0.04949372 0.007961838 0.0006848487
## 14   0.03 0.007911882 0.06633855 0.9969571 0.04973025 0.007937011 0.0006814158
## 15   0.03 0.008691528 0.06668191 0.9969264 0.05000907 0.007961344 0.0006835732
## 16   0.03 0.009548001 0.06732573 0.9968678 0.05053250 0.008032438 0.0006923369
## 17   0.03 0.010488872 0.06800293 0.9968067 0.05108916 0.008046858 0.0006951498
## 18   0.03 0.011522457 0.06870501 0.9967414 0.05167610 0.008114937 0.0007030406
## 19   0.03 0.012657894 0.06955303 0.9966615 0.05238087 0.008243988 0.0007174494
## 20   0.03 0.013905217 0.07037429 0.9965833 0.05305938 0.008326494 0.0007278607
## 21   0.03 0.015275453 0.07107525 0.9965162 0.05361570 0.008397099 0.0007360847
## 22   0.03 0.016780713 0.07173281 0.9964529 0.05415292 0.008487554 0.0007449029
## 23   0.03 0.018434304 0.07237017 0.9963911 0.05467640 0.008575698 0.0007529826
## 24   0.03 0.020250841 0.07298426 0.9963314 0.05517455 0.008648617 0.0007597649
## 25   0.03 0.022246382 0.07359752 0.9962713 0.05565850 0.008756934 0.0007699943
## 26   0.03 0.024438566 0.07423341 0.9962088 0.05614753 0.008862872 0.0007795367
## 27   0.03 0.026846770 0.07492348 0.9961405 0.05665576 0.008970189 0.0007898898
## 28   0.03 0.029492282 0.07564245 0.9960688 0.05717438 0.009081163 0.0008011827
## 29   0.03 0.032398485 0.07637916 0.9959949 0.05767260 0.009197069 0.0008127764
## 30   0.03 0.035591069 0.07715558 0.9959171 0.05816467 0.009312992 0.0008243812
## 31   0.03 0.039098253 0.07798333 0.9958341 0.05866270 0.009449550 0.0008368006
## 32   0.03 0.042951040 0.07887029 0.9957448 0.05917442 0.009603052 0.0008511588
## 33   0.03 0.047183484 0.07983058 0.9956480 0.05970632 0.009730927 0.0008645438
## 34   0.03 0.051832999 0.08087562 0.9955425 0.06025165 0.009888193 0.0008793951
## 35   0.03 0.056940682 0.08199837 0.9954283 0.06082387 0.010056117 0.0008967142
## 36   0.03 0.062551681 0.08317393 0.9953086 0.06140897 0.010255160 0.0009182984
## 37   0.03 0.068715596 0.08454000 0.9951672 0.06211804 0.010496599 0.0009450340
## 38   0.03 0.075486909 0.08609745 0.9950036 0.06294213 0.010730461 0.0009737421
## 39   0.03 0.082925475 0.08776279 0.9948269 0.06382311 0.010999936 0.0010062546
## 40   0.03 0.091097046 0.08954004 0.9946373 0.06478202 0.011242468 0.0010382843
## 41   0.03 0.100073853 0.09146241 0.9944310 0.06583223 0.011525035 0.0010759047
## 42   0.03 0.109935245 0.09357598 0.9942013 0.06705153 0.011831727 0.0011171873
## 43   0.03 0.120768389 0.09591907 0.9939421 0.06844893 0.012133769 0.0011633467
## 44   0.03 0.132669044 0.09837418 0.9936692 0.06995364 0.012453260 0.0012111924
## 45   0.03 0.145742403 0.10100331 0.9933730 0.07158695 0.012791449 0.0012629898
## 46   0.03 0.160104026 0.10382156 0.9930526 0.07339517 0.013121470 0.0013170598
## 47   0.03 0.175880860 0.10682344 0.9927072 0.07539176 0.013452182 0.0013754914
## 48   0.03 0.193212361 0.11000161 0.9923388 0.07760260 0.013800554 0.0014358709
## 49   0.03 0.212251727 0.11341051 0.9919403 0.08002810 0.014161212 0.0015000165
## 50   0.03 0.233167255 0.11702212 0.9915150 0.08260372 0.014551578 0.0015711800
## 51   0.04 0.002331673 0.06434578 0.9971271 0.04827285 0.008482254 0.0007185286
## 52   0.04 0.002561438 0.06434578 0.9971271 0.04827285 0.008482254 0.0007185286
## 53   0.04 0.002813845 0.06434578 0.9971271 0.04827285 0.008482254 0.0007185286
## 54   0.04 0.003091125 0.06441981 0.9971211 0.04832502 0.008423029 0.0007142749
## 55   0.04 0.003395728 0.06455125 0.9971105 0.04842608 0.008321999 0.0007069920
## 56   0.04 0.003730347 0.06468372 0.9970998 0.04852912 0.008230164 0.0006999996
## 57   0.04 0.004097939 0.06484274 0.9970868 0.04864502 0.008147612 0.0006932029
## 58   0.04 0.004501755 0.06506585 0.9970682 0.04880313 0.008076719 0.0006869343
## 59   0.04 0.004945363 0.06532835 0.9970459 0.04899824 0.008014006 0.0006813712
## 60   0.04 0.005432685 0.06562305 0.9970205 0.04922407 0.007957927 0.0006768883
## 61   0.04 0.005968028 0.06593262 0.9969936 0.04946291 0.007919397 0.0006740885
## 62   0.04 0.006556124 0.06627152 0.9969634 0.04972765 0.007914773 0.0006757677
## 63   0.04 0.007202172 0.06666023 0.9969278 0.05003858 0.007967327 0.0006844705
## 64   0.04 0.007911882 0.06714814 0.9968834 0.05042398 0.008012910 0.0006911499
## 65   0.04 0.008691528 0.06783967 0.9968204 0.05099654 0.008038191 0.0006966698
## 66   0.04 0.009548001 0.06861943 0.9967475 0.05167032 0.008106574 0.0007067219
## 67   0.04 0.010488872 0.06935469 0.9966783 0.05230262 0.008189705 0.0007157470
## 68   0.04 0.011522457 0.07012753 0.9966054 0.05297301 0.008245447 0.0007223625
## 69   0.04 0.012657894 0.07075826 0.9965450 0.05350044 0.008322799 0.0007301280
## 70   0.04 0.013905217 0.07133317 0.9964897 0.05396697 0.008402139 0.0007375859
## 71   0.04 0.015275453 0.07190271 0.9964346 0.05440702 0.008502945 0.0007455987
## 72   0.04 0.016780713 0.07246828 0.9963794 0.05485642 0.008592201 0.0007540615
## 73   0.04 0.018434304 0.07305463 0.9963219 0.05532690 0.008670970 0.0007610559
## 74   0.04 0.020250841 0.07366406 0.9962617 0.05581267 0.008761015 0.0007696828
## 75   0.04 0.022246382 0.07432874 0.9961956 0.05632453 0.008873265 0.0007798014
## 76   0.04 0.024438566 0.07501312 0.9961273 0.05683575 0.008978684 0.0007896267
## 77   0.04 0.026846770 0.07573066 0.9960559 0.05734978 0.009062715 0.0007975186
## 78   0.04 0.029492282 0.07647109 0.9959815 0.05784901 0.009176572 0.0008091245
## 79   0.04 0.032398485 0.07722404 0.9959056 0.05832678 0.009298081 0.0008209874
## 80   0.04 0.035591069 0.07803870 0.9958234 0.05882226 0.009423555 0.0008323850
## 81   0.04 0.039098253 0.07895688 0.9957298 0.05936800 0.009564534 0.0008461421
## 82   0.04 0.042951040 0.07998569 0.9956239 0.05996086 0.009715191 0.0008610001
## 83   0.04 0.047183484 0.08101113 0.9955192 0.06049000 0.009872476 0.0008773812
## 84   0.04 0.051832999 0.08208157 0.9954097 0.06101179 0.010103233 0.0009011023
## 85   0.04 0.056940682 0.08320820 0.9952954 0.06152979 0.010300118 0.0009222127
## 86   0.04 0.062551681 0.08442055 0.9951726 0.06210111 0.010491548 0.0009401433
## 87   0.04 0.068715596 0.08583528 0.9950259 0.06282401 0.010753382 0.0009677290
## 88   0.04 0.075486909 0.08739317 0.9948632 0.06364379 0.010965116 0.0009900960
## 89   0.04 0.082925475 0.08903411 0.9946904 0.06451960 0.011200444 0.0010185264
## 90   0.04 0.091097046 0.09084925 0.9944988 0.06551424 0.011485257 0.0010519001
## 91   0.04 0.100073853 0.09278169 0.9942949 0.06659743 0.011715324 0.0010810017
## 92   0.04 0.109935245 0.09476850 0.9940848 0.06776117 0.012020034 0.0011163142
## 93   0.04 0.120768389 0.09697151 0.9938495 0.06911267 0.012311410 0.0011532262
## 94   0.04 0.132669044 0.09932890 0.9935954 0.07060709 0.012618902 0.0011945841
## 95   0.04 0.145742403 0.10186992 0.9933197 0.07226225 0.012968620 0.0012419858
## 96   0.04 0.160104026 0.10464836 0.9930151 0.07409249 0.013341021 0.0012969680
## 97   0.04 0.175880860 0.10768922 0.9926777 0.07614422 0.013732890 0.0013582646
## 98   0.04 0.193212361 0.11094358 0.9923138 0.07841133 0.014106815 0.0014231530
## 99   0.04 0.212251727 0.11444213 0.9919178 0.08093087 0.014501349 0.0014937678
## 100  0.04 0.233167255 0.11823264 0.9914837 0.08367532 0.014917187 0.0015720254
## 101  0.05 0.002331673 0.06599745 0.9969818 0.04963519 0.008091300 0.0007098718
## 102  0.05 0.002561438 0.06602428 0.9969798 0.04965176 0.008064375 0.0007077651
## 103  0.05 0.002813845 0.06606825 0.9969761 0.04968186 0.008045238 0.0007065042
## 104  0.05 0.003091125 0.06612997 0.9969709 0.04972540 0.008023859 0.0007044474
## 105  0.05 0.003395728 0.06619682 0.9969653 0.04977131 0.008003177 0.0007022803
## 106  0.05 0.003730347 0.06626809 0.9969593 0.04982267 0.007982522 0.0006998703
## 107  0.05 0.004097939 0.06634565 0.9969528 0.04987700 0.007965617 0.0006977876
## 108  0.05 0.004501755 0.06642619 0.9969459 0.04993325 0.007955470 0.0006960427
## 109  0.05 0.004945363 0.06651275 0.9969384 0.04999205 0.007947710 0.0006944166
## 110  0.05 0.005432685 0.06662503 0.9969287 0.05006444 0.007926683 0.0006915255
## 111  0.05 0.005968028 0.06675902 0.9969171 0.05015266 0.007895290 0.0006881072
## 112  0.05 0.006556124 0.06697096 0.9968984 0.05030813 0.007906626 0.0006876593
## 113  0.05 0.007202172 0.06753036 0.9968473 0.05077383 0.008086236 0.0007012695
## 114  0.05 0.007911882 0.06823725 0.9967822 0.05135319 0.008098162 0.0007025520
## 115  0.05 0.008691528 0.06896331 0.9967149 0.05202077 0.008124520 0.0007073545
## 116  0.05 0.009548001 0.06964351 0.9966501 0.05263200 0.008200716 0.0007168821
## 117  0.05 0.010488872 0.07026779 0.9965911 0.05318459 0.008223317 0.0007203124
## 118  0.05 0.011522457 0.07081931 0.9965381 0.05364650 0.008335262 0.0007294368
## 119  0.05 0.012657894 0.07135380 0.9964865 0.05406004 0.008422962 0.0007364609
## 120  0.05 0.013905217 0.07185256 0.9964380 0.05444180 0.008504849 0.0007435492
## 121  0.05 0.015275453 0.07239734 0.9963847 0.05485109 0.008611972 0.0007536311
## 122  0.05 0.016780713 0.07299284 0.9963261 0.05532754 0.008695805 0.0007603695
## 123  0.05 0.018434304 0.07364642 0.9962615 0.05585836 0.008799177 0.0007693243
## 124  0.05 0.020250841 0.07430506 0.9961960 0.05636927 0.008902381 0.0007795311
## 125  0.05 0.022246382 0.07498425 0.9961283 0.05687850 0.008980316 0.0007872723
## 126  0.05 0.024438566 0.07566466 0.9960606 0.05735333 0.009069558 0.0007952494
## 127  0.05 0.026846770 0.07630035 0.9959972 0.05775526 0.009164997 0.0008044931
## 128  0.05 0.029492282 0.07700627 0.9959266 0.05819048 0.009284266 0.0008154791
## 129  0.05 0.032398485 0.07781705 0.9958446 0.05866912 0.009431321 0.0008291074
## 130  0.05 0.035591069 0.07871840 0.9957527 0.05919032 0.009591654 0.0008451463
## 131  0.05 0.039098253 0.07972159 0.9956497 0.05975180 0.009769208 0.0008636660
## 132  0.05 0.042951040 0.08074327 0.9955451 0.06027815 0.009951459 0.0008821962
## 133  0.05 0.047183484 0.08176965 0.9954407 0.06076243 0.010141032 0.0009003061
## 134  0.05 0.051832999 0.08285937 0.9953301 0.06126446 0.010304600 0.0009159240
## 135  0.05 0.056940682 0.08407021 0.9952066 0.06184138 0.010457395 0.0009299067
## 136  0.05 0.062551681 0.08538090 0.9950721 0.06248948 0.010671923 0.0009518402
## 137  0.05 0.068715596 0.08679432 0.9949266 0.06322470 0.010907073 0.0009764365
## 138  0.05 0.075486909 0.08830705 0.9947706 0.06402878 0.011136104 0.0009998740
## 139  0.05 0.082925475 0.08994124 0.9946005 0.06490908 0.011380280 0.0010265879
## 140  0.05 0.091097046 0.09171479 0.9944153 0.06588035 0.011631348 0.0010555143
## 141  0.05 0.100073853 0.09362466 0.9942153 0.06694782 0.011905996 0.0010878648
## 142  0.05 0.109935245 0.09566984 0.9940013 0.06816869 0.012221274 0.0011242868
## 143  0.05 0.120768389 0.09792021 0.9937634 0.06957544 0.012584526 0.0011690856
## 144  0.05 0.132669044 0.10042109 0.9934948 0.07118747 0.012939611 0.0012174890
## 145  0.05 0.145742403 0.10312171 0.9932019 0.07295712 0.013299653 0.0012699011
## 146  0.05 0.160104026 0.10601626 0.9928860 0.07490088 0.013652811 0.0013246111
## 147  0.05 0.175880860 0.10909823 0.9925479 0.07705657 0.014035496 0.0013853568
## 148  0.05 0.193212361 0.11241231 0.9921815 0.07941359 0.014455983 0.0014558492
## 149  0.05 0.212251727 0.11600648 0.9917814 0.08198735 0.014905317 0.0015346012
## 150  0.05 0.233167255 0.11991603 0.9913420 0.08479580 0.015368550 0.0016200758
## 151  0.06 0.002331673 0.06602278 0.9969886 0.04941966 0.007563829 0.0006431239
## 152  0.06 0.002561438 0.06611745 0.9969797 0.04950308 0.007570636 0.0006450830
## 153  0.06 0.002813845 0.06623065 0.9969693 0.04960837 0.007571432 0.0006463919
## 154  0.06 0.003091125 0.06632374 0.9969605 0.04969565 0.007590679 0.0006500107
## 155  0.06 0.003395728 0.06644922 0.9969487 0.04981232 0.007607122 0.0006531666
## 156  0.06 0.003730347 0.06656775 0.9969376 0.04992154 0.007631107 0.0006570678
## 157  0.06 0.004097939 0.06668564 0.9969265 0.05002891 0.007657050 0.0006612038
## 158  0.06 0.004501755 0.06679490 0.9969161 0.05013256 0.007697403 0.0006669382
## 159  0.06 0.004945363 0.06695141 0.9969014 0.05028095 0.007750499 0.0006733131
## 160  0.06 0.005432685 0.06714537 0.9968834 0.05044851 0.007896301 0.0006854559
## 161  0.06 0.005968028 0.06730687 0.9968680 0.05060760 0.007990098 0.0006955940
## 162  0.06 0.006556124 0.06764251 0.9968365 0.05089391 0.008119790 0.0007071140
## 163  0.06 0.007202172 0.06845384 0.9967618 0.05160845 0.008181530 0.0007118695
## 164  0.06 0.007911882 0.06902702 0.9967090 0.05212574 0.008129115 0.0007103231
## 165  0.06 0.008691528 0.06967411 0.9966471 0.05269914 0.008257297 0.0007212138
## 166  0.06 0.009548001 0.07023437 0.9965941 0.05319590 0.008295085 0.0007254485
## 167  0.06 0.010488872 0.07078441 0.9965412 0.05364472 0.008377449 0.0007321204
## 168  0.06 0.011522457 0.07129681 0.9964913 0.05404522 0.008482651 0.0007408705
## 169  0.06 0.012657894 0.07182049 0.9964399 0.05444373 0.008584286 0.0007501632
## 170  0.06 0.013905217 0.07241434 0.9963812 0.05490674 0.008678387 0.0007585683
## 171  0.06 0.015275453 0.07302105 0.9963215 0.05539766 0.008752197 0.0007646374
## 172  0.06 0.016780713 0.07362670 0.9962614 0.05587272 0.008842403 0.0007733790
## 173  0.06 0.018434304 0.07425578 0.9961986 0.05634394 0.008940841 0.0007831618
## 174  0.06 0.020250841 0.07484683 0.9961398 0.05676153 0.009000271 0.0007896382
## 175  0.06 0.022246382 0.07543054 0.9960819 0.05715132 0.009080600 0.0007969074
## 176  0.06 0.024438566 0.07602455 0.9960232 0.05752811 0.009197609 0.0008072066
## 177  0.06 0.026846770 0.07673839 0.9959513 0.05797472 0.009331806 0.0008203594
## 178  0.06 0.029492282 0.07758495 0.9958654 0.05849610 0.009466136 0.0008336953
## 179  0.06 0.032398485 0.07852664 0.9957692 0.05905735 0.009621772 0.0008490176
## 180  0.06 0.035591069 0.07954945 0.9956639 0.05964251 0.009783569 0.0008659694
## 181  0.06 0.039098253 0.08051941 0.9955652 0.06013775 0.009917051 0.0008783735
## 182  0.06 0.042951040 0.08149988 0.9954658 0.06058246 0.010073395 0.0008915929
## 183  0.06 0.047183484 0.08256780 0.9953569 0.06106842 0.010207269 0.0009041966
## 184  0.06 0.051832999 0.08366278 0.9952462 0.06158260 0.010378338 0.0009208422
## 185  0.06 0.056940682 0.08481650 0.9951303 0.06214566 0.010567508 0.0009360087
## 186  0.06 0.062551681 0.08608842 0.9950018 0.06278100 0.010800635 0.0009569520
## 187  0.06 0.068715596 0.08749228 0.9948588 0.06348639 0.011020333 0.0009790133
## 188  0.06 0.075486909 0.08899714 0.9947051 0.06428019 0.011246827 0.0010014661
## 189  0.06 0.082925475 0.09063926 0.9945360 0.06517749 0.011516199 0.0010308975
## 190  0.06 0.091097046 0.09246986 0.9943456 0.06623117 0.011826741 0.0010651070
## 191  0.06 0.100073853 0.09449794 0.9941328 0.06742253 0.012155910 0.0011037005
## 192  0.06 0.109935245 0.09670719 0.9938998 0.06875474 0.012494715 0.0011442902
## 193  0.06 0.120768389 0.09911346 0.9936435 0.07029468 0.012868094 0.0011917414
## 194  0.06 0.132669044 0.10174091 0.9933598 0.07201514 0.013238154 0.0012423063
## 195  0.06 0.145742403 0.10452543 0.9930577 0.07384551 0.013603040 0.0012974234
## 196  0.06 0.160104026 0.10747476 0.9927377 0.07588663 0.013983449 0.0013578085
## 197  0.06 0.175880860 0.11065581 0.9923923 0.07815043 0.014418624 0.0014265708
## 198  0.06 0.193212361 0.11411765 0.9920133 0.08056964 0.014871175 0.0015022157
## 199  0.06 0.212251727 0.11786257 0.9915997 0.08318840 0.015334734 0.0015853880
## 200  0.06 0.233167255 0.12189773 0.9911518 0.08605963 0.015797891 0.0016745397
## 201  0.07 0.002331673 0.06612137 0.9969781 0.04953104 0.007817640 0.0006656349
## 202  0.07 0.002561438 0.06616965 0.9969737 0.04957278 0.007803131 0.0006651802
## 203  0.07 0.002813845 0.06621951 0.9969691 0.04961458 0.007789929 0.0006648916
## 204  0.07 0.003091125 0.06627128 0.9969643 0.04965654 0.007777268 0.0006647276
## 205  0.07 0.003395728 0.06632642 0.9969592 0.04970156 0.007765852 0.0006646424
## 206  0.07 0.003730347 0.06641224 0.9969512 0.04978470 0.007773234 0.0006654805
## 207  0.07 0.004097939 0.06651188 0.9969417 0.04988280 0.007798032 0.0006684866
## 208  0.07 0.004501755 0.06666180 0.9969277 0.05000662 0.007873719 0.0006752304
## 209  0.07 0.004945363 0.06689524 0.9969044 0.05023632 0.008035583 0.0006950039
## 210  0.07 0.005432685 0.06724710 0.9968712 0.05057148 0.008160660 0.0007088048
## 211  0.07 0.005968028 0.06778992 0.9968226 0.05104959 0.008127031 0.0007098763
## 212  0.07 0.006556124 0.06843141 0.9967644 0.05163841 0.008143265 0.0007082619
## 213  0.07 0.007202172 0.06905435 0.9967057 0.05220010 0.008162190 0.0007132757
## 214  0.07 0.007911882 0.06967379 0.9966471 0.05274207 0.008240185 0.0007206087
## 215  0.07 0.008691528 0.07021778 0.9965951 0.05320744 0.008317084 0.0007270094
## 216  0.07 0.009548001 0.07076055 0.9965425 0.05364820 0.008412619 0.0007349014
## 217  0.07 0.010488872 0.07125137 0.9964947 0.05402042 0.008489388 0.0007423414
## 218  0.07 0.011522457 0.07181447 0.9964394 0.05444734 0.008598158 0.0007521709
## 219  0.07 0.012657894 0.07238322 0.9963834 0.05489409 0.008656014 0.0007573683
## 220  0.07 0.013905217 0.07295714 0.9963265 0.05535403 0.008722185 0.0007634652
## 221  0.07 0.015275453 0.07354572 0.9962676 0.05580146 0.008814622 0.0007721347
## 222  0.07 0.016780713 0.07412682 0.9962097 0.05621426 0.008885228 0.0007788746
## 223  0.07 0.018434304 0.07469651 0.9961530 0.05660842 0.008959421 0.0007861081
## 224  0.07 0.020250841 0.07525537 0.9960977 0.05698338 0.009057574 0.0007956502
## 225  0.07 0.022246382 0.07585182 0.9960387 0.05736943 0.009169251 0.0008066654
## 226  0.07 0.024438566 0.07656039 0.9959678 0.05782690 0.009304066 0.0008185226
## 227  0.07 0.026846770 0.07740513 0.9958821 0.05836042 0.009443917 0.0008317912
## 228  0.07 0.029492282 0.07840265 0.9957790 0.05897594 0.009618125 0.0008491306
## 229  0.07 0.032398485 0.07942030 0.9956742 0.05956770 0.009720600 0.0008604750
## 230  0.07 0.035591069 0.08031375 0.9955837 0.06000058 0.009830362 0.0008697570
## 231  0.07 0.039098253 0.08124529 0.9954889 0.06041402 0.009942743 0.0008798949
## 232  0.07 0.042951040 0.08221867 0.9953905 0.06087064 0.010105133 0.0008929247
## 233  0.07 0.047183484 0.08322177 0.9952898 0.06134719 0.010267893 0.0009062638
## 234  0.07 0.051832999 0.08427383 0.9951848 0.06183901 0.010482143 0.0009238223
## 235  0.07 0.056940682 0.08543029 0.9950702 0.06238168 0.010676276 0.0009397898
## 236  0.07 0.062551681 0.08672574 0.9949410 0.06302533 0.010869751 0.0009564904
## 237  0.07 0.068715596 0.08810992 0.9948017 0.06373288 0.011132968 0.0009813097
## 238  0.07 0.075486909 0.08964941 0.9946452 0.06457386 0.011410884 0.0010077414
## 239  0.07 0.082925475 0.09140004 0.9944636 0.06558534 0.011727221 0.0010416101
## 240  0.07 0.091097046 0.09336444 0.9942577 0.06673917 0.012040668 0.0010773610
## 241  0.07 0.100073853 0.09552366 0.9940290 0.06802582 0.012396760 0.0011198009
## 242  0.07 0.109935245 0.09786501 0.9937800 0.06946474 0.012766315 0.0011643068
## 243  0.07 0.120768389 0.10036740 0.9935118 0.07107692 0.013173796 0.0012193576
## 244  0.07 0.132669044 0.10308972 0.9932171 0.07286831 0.013538150 0.0012721559
## 245  0.07 0.145742403 0.10592823 0.9929121 0.07479546 0.013919656 0.0013293807
## 246  0.07 0.160104026 0.10898381 0.9925838 0.07692166 0.014386505 0.0013992072
## 247  0.07 0.175880860 0.11233348 0.9922192 0.07927386 0.014837174 0.0014743433
## 248  0.07 0.193212361 0.11597074 0.9918209 0.08180541 0.015302054 0.0015570969
## 249  0.07 0.212251727 0.11988129 0.9913917 0.08452779 0.015752251 0.0016430901
## 250  0.07 0.233167255 0.12408848 0.9909292 0.08753082 0.016171684 0.0017320212
## 251  0.08 0.002331673 0.06540228 0.9970398 0.04905608 0.008159286 0.0007038839
## 252  0.08 0.002561438 0.06552516 0.9970292 0.04915486 0.008120710 0.0007001275
## 253  0.08 0.002813845 0.06568983 0.9970150 0.04928526 0.008075543 0.0006958692
## 254  0.08 0.003091125 0.06585061 0.9970009 0.04940661 0.008043689 0.0006928184
## 255  0.08 0.003395728 0.06602215 0.9969856 0.04953473 0.008026308 0.0006909039
## 256  0.08 0.003730347 0.06622089 0.9969680 0.04969136 0.007995982 0.0006879498
## 257  0.08 0.004097939 0.06646926 0.9969462 0.04990221 0.007964757 0.0006849660
## 258  0.08 0.004501755 0.06675436 0.9969202 0.05016849 0.007959621 0.0006857318
## 259  0.08 0.004945363 0.06717827 0.9968793 0.05053837 0.008076469 0.0007035679
## 260  0.08 0.005432685 0.06775667 0.9968256 0.05102232 0.008135539 0.0007109782
## 261  0.08 0.005968028 0.06847941 0.9967605 0.05171246 0.008066147 0.0007036704
## 262  0.08 0.006556124 0.06901161 0.9967098 0.05219179 0.008171773 0.0007168615
## 263  0.08 0.007202172 0.06958915 0.9966556 0.05268474 0.008249955 0.0007212972
## 264  0.08 0.007911882 0.07011654 0.9966054 0.05314027 0.008312999 0.0007260018
## 265  0.08 0.008691528 0.07064633 0.9965537 0.05357187 0.008420835 0.0007353479
## 266  0.08 0.009548001 0.07114233 0.9965053 0.05395102 0.008501990 0.0007424173
## 267  0.08 0.010488872 0.07169173 0.9964513 0.05437073 0.008592301 0.0007504858
## 268  0.08 0.011522457 0.07222679 0.9963986 0.05477859 0.008670318 0.0007578489
## 269  0.08 0.012657894 0.07278954 0.9963424 0.05521174 0.008735262 0.0007643609
## 270  0.08 0.013905217 0.07337058 0.9962843 0.05564600 0.008806676 0.0007716484
## 271  0.08 0.015275453 0.07396058 0.9962250 0.05608067 0.008896602 0.0007808646
## 272  0.08 0.016780713 0.07453183 0.9961676 0.05648016 0.008980839 0.0007898439
## 273  0.08 0.018434304 0.07510653 0.9961106 0.05687358 0.009055951 0.0007971203
## 274  0.08 0.020250841 0.07571244 0.9960507 0.05727809 0.009155105 0.0008067503
## 275  0.08 0.022246382 0.07638252 0.9959842 0.05770316 0.009279810 0.0008179515
## 276  0.08 0.024438566 0.07722920 0.9958982 0.05824720 0.009426767 0.0008304094
## 277  0.08 0.026846770 0.07820104 0.9957980 0.05886034 0.009553771 0.0008434462
## 278  0.08 0.029492282 0.07917899 0.9956969 0.05942298 0.009652648 0.0008543453
## 279  0.08 0.032398485 0.08001575 0.9956122 0.05982200 0.009745541 0.0008616317
## 280  0.08 0.035591069 0.08087638 0.9955253 0.06021482 0.009870375 0.0008719495
## 281  0.08 0.039098253 0.08175600 0.9954372 0.06063088 0.010021810 0.0008835896
## 282  0.08 0.042951040 0.08269281 0.9953437 0.06105697 0.010187084 0.0008961977
## 283  0.08 0.047183484 0.08371247 0.9952419 0.06151452 0.010367445 0.0009104129
## 284  0.08 0.051832999 0.08480353 0.9951337 0.06200826 0.010545794 0.0009242729
## 285  0.08 0.056940682 0.08597817 0.9950175 0.06258245 0.010748156 0.0009403672
## 286  0.08 0.062551681 0.08727611 0.9948886 0.06326164 0.011009019 0.0009623845
## 287  0.08 0.068715596 0.08872837 0.9947421 0.06404774 0.011296236 0.0009888688
## 288  0.08 0.075486909 0.09037166 0.9945732 0.06496592 0.011591625 0.0010187218
## 289  0.08 0.082925475 0.09223233 0.9943793 0.06603872 0.011919396 0.0010546124
## 290  0.08 0.091097046 0.09431118 0.9941596 0.06726403 0.012280611 0.0010963403
## 291  0.08 0.100073853 0.09656979 0.9939190 0.06862568 0.012643001 0.0011407982
## 292  0.08 0.109935245 0.09900091 0.9936592 0.07014717 0.013031878 0.0011905348
## 293  0.08 0.120768389 0.10160167 0.9933806 0.07182908 0.013440095 0.0012441732
## 294  0.08 0.132669044 0.10437139 0.9930837 0.07370031 0.013855710 0.0013023187
## 295  0.08 0.145742403 0.10734614 0.9927645 0.07577738 0.014296918 0.0013683976
## 296  0.08 0.160104026 0.11058474 0.9924140 0.07802877 0.014762359 0.0014426608
## 297  0.08 0.175880860 0.11408562 0.9920335 0.08043973 0.015223017 0.0015219700
## 298  0.08 0.193212361 0.11781969 0.9916285 0.08301066 0.015652759 0.0016027948
## 299  0.08 0.212251727 0.12165344 0.9912224 0.08572912 0.015956923 0.0016705477
## 300  0.08 0.233167255 0.12545817 0.9908456 0.08850224 0.016223528 0.0017290777
## 301  0.09 0.002331673 0.06450885 0.9971127 0.04837535 0.008196725 0.0007038527
## 302  0.09 0.002561438 0.06475829 0.9970917 0.04858027 0.008160155 0.0007022357
## 303  0.09 0.002813845 0.06501201 0.9970710 0.04878145 0.008074917 0.0006964606
## 304  0.09 0.003091125 0.06527306 0.9970491 0.04898692 0.008006119 0.0006926078
## 305  0.09 0.003395728 0.06560081 0.9970204 0.04924579 0.007984734 0.0006935216
## 306  0.09 0.003730347 0.06600725 0.9969846 0.04957253 0.007968207 0.0006939355
## 307  0.09 0.004097939 0.06647671 0.9969428 0.04995438 0.007955978 0.0006944053
## 308  0.09 0.004501755 0.06702724 0.9968931 0.05043546 0.007996410 0.0006985259
## 309  0.09 0.004945363 0.06771564 0.9968301 0.05104785 0.008080197 0.0007051509
## 310  0.09 0.005432685 0.06840034 0.9967668 0.05166704 0.008151336 0.0007120400
## 311  0.09 0.005968028 0.06893095 0.9967170 0.05213453 0.008229353 0.0007193280
## 312  0.09 0.006556124 0.06946373 0.9966667 0.05259205 0.008276857 0.0007239022
## 313  0.09 0.007202172 0.06997689 0.9966182 0.05302494 0.008343250 0.0007287593
## 314  0.09 0.007911882 0.07047994 0.9965696 0.05344150 0.008427799 0.0007357939
## 315  0.09 0.008691528 0.07098573 0.9965200 0.05383792 0.008507092 0.0007429860
## 316  0.09 0.009548001 0.07149538 0.9964697 0.05422345 0.008598959 0.0007511442
## 317  0.09 0.010488872 0.07204353 0.9964155 0.05462864 0.008681927 0.0007590260
## 318  0.09 0.011522457 0.07261606 0.9963588 0.05505809 0.008748189 0.0007655494
## 319  0.09 0.012657894 0.07319392 0.9963011 0.05549559 0.008830261 0.0007736313
## 320  0.09 0.013905217 0.07378927 0.9962412 0.05593664 0.008910537 0.0007818160
## 321  0.09 0.015275453 0.07439274 0.9961806 0.05636674 0.008983524 0.0007895651
## 322  0.09 0.016780713 0.07498028 0.9961220 0.05677520 0.009059795 0.0007975720
## 323  0.09 0.018434304 0.07558461 0.9960618 0.05718110 0.009147477 0.0008061893
## 324  0.09 0.020250841 0.07624365 0.9959962 0.05760335 0.009246672 0.0008150904
## 325  0.09 0.022246382 0.07700945 0.9959191 0.05809050 0.009352480 0.0008243512
## 326  0.09 0.024438566 0.07794587 0.9958228 0.05868649 0.009434784 0.0008319954
## 327  0.09 0.026846770 0.07887639 0.9957266 0.05922913 0.009518783 0.0008414413
## 328  0.09 0.029492282 0.07967560 0.9956455 0.05963326 0.009632989 0.0008517086
## 329  0.09 0.032398485 0.08046040 0.9955670 0.06000470 0.009758007 0.0008605140
## 330  0.09 0.035591069 0.08129369 0.9954837 0.06039580 0.009915162 0.0008725711
## 331  0.09 0.039098253 0.08219812 0.9953935 0.06080047 0.010082662 0.0008854428
## 332  0.09 0.042951040 0.08318572 0.9952951 0.06123359 0.010241693 0.0008970376
## 333  0.09 0.047183484 0.08422230 0.9951924 0.06169823 0.010422126 0.0009110963
## 334  0.09 0.051832999 0.08532880 0.9950830 0.06220846 0.010633694 0.0009266155
## 335  0.09 0.056940682 0.08653543 0.9949634 0.06283832 0.010884056 0.0009459823
## 336  0.09 0.062551681 0.08789117 0.9948280 0.06357285 0.011163940 0.0009697664
## 337  0.09 0.068715596 0.08944376 0.9946699 0.06443223 0.011463905 0.0009983773
## 338  0.09 0.075486909 0.09119400 0.9944888 0.06540613 0.011781453 0.0010311566
## 339  0.09 0.082925475 0.09316648 0.9942813 0.06654565 0.012144104 0.0010714880
## 340  0.09 0.091097046 0.09533704 0.9940508 0.06783612 0.012513782 0.0011145132
## 341  0.09 0.100073853 0.09768910 0.9937997 0.06926453 0.012916491 0.0011644322
## 342  0.09 0.109935245 0.10020104 0.9935306 0.07086426 0.013331664 0.0012168481
## 343  0.09 0.120768389 0.10287267 0.9932448 0.07263564 0.013764766 0.0012745794
## 344  0.09 0.132669044 0.10576896 0.9929333 0.07464046 0.014232043 0.0013424909
## 345  0.09 0.145742403 0.10886791 0.9925993 0.07680946 0.014696916 0.0014148688
## 346  0.09 0.160104026 0.11213695 0.9922486 0.07905130 0.015144322 0.0014911012
## 347  0.09 0.175880860 0.11551114 0.9918936 0.08133330 0.015477327 0.0015540461
## 348  0.09 0.193212361 0.11892057 0.9915550 0.08371203 0.015744789 0.0016111101
## 349  0.09 0.212251727 0.12237427 0.9912345 0.08616955 0.016060028 0.0016674040
## 350  0.09 0.233167255 0.12605835 0.9909060 0.08885904 0.016384702 0.0017229815
## 351  0.10 0.002331673 0.06503981 0.9970679 0.04865440 0.007756012 0.0006672514
## 352  0.10 0.002561438 0.06524769 0.9970496 0.04883565 0.007747857 0.0006695842
## 353  0.10 0.002813845 0.06547366 0.9970294 0.04903602 0.007759370 0.0006739406
## 354  0.10 0.003091125 0.06574939 0.9970050 0.04928570 0.007771806 0.0006782240
## 355  0.10 0.003395728 0.06609144 0.9969747 0.04959712 0.007822428 0.0006843920
## 356  0.10 0.003730347 0.06650349 0.9969380 0.04996992 0.007906292 0.0006920533
## 357  0.10 0.004097939 0.06699175 0.9968943 0.05041511 0.008001629 0.0007004432
## 358  0.10 0.004501755 0.06761647 0.9968381 0.05098900 0.008089839 0.0007080987
## 359  0.10 0.004945363 0.06820530 0.9967840 0.05152636 0.008186193 0.0007161074
## 360  0.10 0.005432685 0.06873230 0.9967348 0.05199447 0.008254574 0.0007224822
## 361  0.10 0.005968028 0.06928405 0.9966831 0.05247647 0.008296608 0.0007267970
## 362  0.10 0.006556124 0.06977639 0.9966362 0.05289698 0.008381846 0.0007339322
## 363  0.10 0.007202172 0.07026389 0.9965891 0.05330272 0.008462654 0.0007407980
## 364  0.10 0.007911882 0.07077470 0.9965393 0.05370269 0.008538173 0.0007471112
## 365  0.10 0.008691528 0.07129029 0.9964886 0.05408441 0.008623062 0.0007546825
## 366  0.10 0.009548001 0.07182245 0.9964364 0.05447459 0.008690736 0.0007609289
## 367  0.10 0.010488872 0.07237986 0.9963815 0.05488364 0.008751294 0.0007663946
## 368  0.10 0.011522457 0.07295106 0.9963246 0.05530719 0.008846978 0.0007751474
## 369  0.10 0.012657894 0.07354229 0.9962654 0.05574241 0.008925256 0.0007828314
## 370  0.10 0.013905217 0.07416631 0.9962026 0.05619523 0.008994579 0.0007910419
## 371  0.10 0.015275453 0.07477966 0.9961413 0.05662372 0.009069875 0.0007994425
## 372  0.10 0.016780713 0.07540290 0.9960794 0.05703814 0.009134662 0.0008056544
## 373  0.10 0.018434304 0.07605873 0.9960140 0.05746715 0.009212637 0.0008128068
## 374  0.10 0.020250841 0.07678546 0.9959411 0.05793566 0.009287513 0.0008190468
## 375  0.10 0.022246382 0.07762355 0.9958557 0.05846612 0.009371357 0.0008256062
## 376  0.10 0.024438566 0.07848122 0.9957670 0.05898964 0.009456095 0.0008341052
## 377  0.10 0.026846770 0.07926386 0.9956874 0.05940741 0.009566884 0.0008446035
## 378  0.10 0.029492282 0.08001696 0.9956119 0.05976847 0.009703059 0.0008552940
## 379  0.10 0.032398485 0.08081854 0.9955320 0.06014681 0.009832328 0.0008641931
## 380  0.10 0.035591069 0.08171702 0.9954419 0.06056320 0.009986313 0.0008761189
## 381  0.10 0.039098253 0.08267462 0.9953465 0.06098410 0.010135911 0.0008866180
## 382  0.10 0.042951040 0.08367018 0.9952472 0.06141811 0.010307593 0.0008993057
## 383  0.10 0.047183484 0.08471245 0.9951441 0.06188965 0.010521699 0.0009145196
## 384  0.10 0.051832999 0.08585847 0.9950302 0.06244517 0.010767122 0.0009330131
## 385  0.10 0.056940682 0.08713496 0.9949026 0.06313441 0.011035481 0.0009548786
## 386  0.10 0.062551681 0.08857866 0.9947565 0.06392688 0.011332501 0.0009820033
## 387  0.10 0.068715596 0.09022328 0.9945877 0.06485192 0.011650893 0.0010122111
## 388  0.10 0.075486909 0.09207501 0.9943952 0.06588969 0.012005197 0.0010485653
## 389  0.10 0.082925475 0.09415511 0.9941755 0.06709475 0.012371965 0.0010891659
## 390  0.10 0.091097046 0.09643176 0.9939321 0.06845965 0.012783992 0.0011391886
## 391  0.10 0.100073853 0.09884153 0.9936753 0.06994466 0.013208549 0.0011916792
## 392  0.10 0.109935245 0.10137125 0.9934050 0.07158686 0.013637361 0.0012464227
## 393  0.10 0.120768389 0.10408596 0.9931152 0.07343918 0.014103642 0.0013106820
## 394  0.10 0.132669044 0.10696850 0.9928076 0.07545967 0.014533747 0.0013771322
## 395  0.10 0.145742403 0.10998404 0.9924927 0.07754147 0.014921862 0.0014408992
## 396  0.10 0.160104026 0.11305549 0.9921826 0.07957776 0.015290439 0.0015027857
## 397  0.10 0.175880860 0.11619236 0.9918834 0.08171315 0.015580004 0.0015560725
## 398  0.10 0.193212361 0.11941857 0.9915934 0.08397107 0.015899402 0.0016076564
## 399  0.10 0.212251727 0.12289838 0.9912893 0.08644771 0.016223454 0.0016609059
## 400  0.10 0.233167255 0.12669266 0.9909694 0.08922393 0.016547997 0.0017167639
## 401  0.11 0.002331673 0.06516620 0.9970601 0.04881280 0.008306366 0.0007013016
## 402  0.11 0.002561438 0.06532288 0.9970469 0.04895033 0.008255644 0.0006972261
## 403  0.11 0.002813845 0.06556478 0.9970263 0.04915471 0.008180650 0.0006920659
## 404  0.11 0.003091125 0.06590468 0.9969959 0.04944915 0.008148009 0.0006940055
## 405  0.11 0.003395728 0.06628979 0.9969605 0.04978977 0.008138925 0.0007001948
## 406  0.11 0.003730347 0.06672013 0.9969207 0.05017185 0.008110257 0.0007039408
## 407  0.11 0.004097939 0.06733848 0.9968645 0.05071754 0.008098311 0.0007053304
## 408  0.11 0.004501755 0.06798312 0.9968053 0.05129767 0.008124815 0.0007087196
## 409  0.11 0.004945363 0.06852457 0.9967547 0.05179516 0.008194474 0.0007164497
## 410  0.11 0.005432685 0.06905446 0.9967051 0.05226617 0.008270843 0.0007231166
## 411  0.11 0.005968028 0.06955288 0.9966579 0.05268879 0.008348804 0.0007300457
## 412  0.11 0.006556124 0.07002656 0.9966123 0.05309169 0.008419214 0.0007359995
## 413  0.11 0.007202172 0.07052419 0.9965639 0.05350132 0.008495278 0.0007420767
## 414  0.11 0.007911882 0.07105006 0.9965123 0.05389878 0.008575131 0.0007495537
## 415  0.11 0.008691528 0.07157899 0.9964604 0.05428566 0.008643783 0.0007554689
## 416  0.11 0.009548001 0.07212537 0.9964066 0.05467395 0.008700322 0.0007605969
## 417  0.11 0.010488872 0.07268681 0.9963509 0.05508328 0.008772934 0.0007673383
## 418  0.11 0.011522457 0.07327605 0.9962920 0.05551497 0.008857333 0.0007757087
## 419  0.11 0.012657894 0.07391812 0.9962274 0.05598645 0.008939114 0.0007849437
## 420  0.11 0.013905217 0.07453996 0.9961652 0.05643063 0.008999598 0.0007920209
## 421  0.11 0.015275453 0.07515794 0.9961038 0.05684328 0.009063295 0.0007981487
## 422  0.11 0.016780713 0.07580193 0.9960397 0.05725948 0.009127217 0.0008039049
## 423  0.11 0.018434304 0.07651250 0.9959682 0.05771611 0.009214459 0.0008107003
## 424  0.11 0.020250841 0.07727969 0.9958905 0.05821004 0.009284957 0.0008164873
## 425  0.11 0.022246382 0.07809345 0.9958071 0.05874195 0.009348323 0.0008231231
## 426  0.11 0.024438566 0.07882121 0.9957331 0.05915324 0.009453580 0.0008328971
## 427  0.11 0.026846770 0.07953314 0.9956612 0.05950076 0.009597692 0.0008445017
## 428  0.11 0.029492282 0.08031295 0.9955832 0.05986991 0.009753176 0.0008561724
## 429  0.11 0.032398485 0.08119716 0.9954946 0.06028878 0.009889577 0.0008656751
## 430  0.11 0.035591069 0.08213463 0.9954006 0.06072304 0.010039064 0.0008764323
## 431  0.11 0.039098253 0.08310164 0.9953038 0.06115719 0.010213305 0.0008891277
## 432  0.11 0.042951040 0.08410432 0.9952043 0.06159661 0.010410423 0.0009027625
## 433  0.11 0.047183484 0.08520211 0.9950946 0.06211397 0.010647860 0.0009201522
## 434  0.11 0.051832999 0.08641972 0.9949726 0.06272754 0.010902741 0.0009411422
## 435  0.11 0.056940682 0.08779802 0.9948335 0.06346731 0.011174140 0.0009632034
## 436  0.11 0.062551681 0.08934097 0.9946756 0.06434204 0.011499617 0.0009932241
## 437  0.11 0.068715596 0.09106788 0.9944977 0.06531850 0.011848973 0.0010276818
## 438  0.11 0.075486909 0.09302863 0.9942918 0.06643280 0.012216816 0.0010651584
## 439  0.11 0.082925475 0.09515060 0.9940673 0.06763720 0.012621861 0.0011107163
## 440  0.11 0.091097046 0.09742672 0.9938247 0.06900956 0.013068137 0.0011649654
## 441  0.11 0.100073853 0.09980608 0.9935709 0.07053985 0.013488343 0.0012184260
## 442  0.11 0.109935245 0.10229584 0.9933107 0.07220797 0.013901142 0.0012735596
## 443  0.11 0.120768389 0.10498273 0.9930309 0.07404798 0.014376332 0.0013423585
## 444  0.11 0.132669044 0.10782978 0.9927381 0.07600020 0.014751022 0.0014001826
## 445  0.11 0.145742403 0.11067766 0.9924566 0.07790230 0.015110342 0.0014563789
## 446  0.11 0.160104026 0.11360037 0.9921843 0.07986176 0.015413320 0.0015059727
## 447  0.11 0.175880860 0.11661112 0.9919201 0.08191891 0.015726247 0.0015533506
## 448  0.11 0.193212361 0.11988393 0.9916409 0.08421548 0.016058948 0.0016041527
## 449  0.11 0.212251727 0.12348457 0.9913423 0.08675920 0.016394496 0.0016576243
## 450  0.11 0.233167255 0.12742178 0.9910252 0.08963382 0.016717016 0.0017132638
## 451  0.12 0.002331673 0.06479223 0.9970910 0.04862001 0.008446027 0.0007124853
## 452  0.12 0.002561438 0.06509334 0.9970652 0.04885646 0.008372732 0.0007088632
## 453  0.12 0.002813845 0.06547524 0.9970320 0.04916363 0.008280633 0.0007057736
## 454  0.12 0.003091125 0.06593987 0.9969909 0.04954239 0.008211528 0.0007047992
## 455  0.12 0.003395728 0.06642913 0.9969478 0.04992908 0.008140355 0.0007014028
## 456  0.12 0.003730347 0.06698459 0.9968975 0.05039443 0.008088852 0.0007014168
## 457  0.12 0.004097939 0.06768893 0.9968324 0.05103262 0.008151196 0.0007099059
## 458  0.12 0.004501755 0.06834310 0.9967713 0.05162021 0.008248120 0.0007194319
## 459  0.12 0.004945363 0.06886102 0.9967231 0.05207007 0.008284846 0.0007226657
## 460  0.12 0.005432685 0.06933889 0.9966781 0.05247885 0.008337511 0.0007267780
## 461  0.12 0.005968028 0.06980403 0.9966335 0.05287235 0.008417258 0.0007334903
## 462  0.12 0.006556124 0.07028255 0.9965868 0.05326613 0.008501593 0.0007408942
## 463  0.12 0.007202172 0.07077851 0.9965385 0.05365048 0.008578979 0.0007479575
## 464  0.12 0.007911882 0.07129552 0.9964881 0.05402778 0.008642524 0.0007539556
## 465  0.12 0.008691528 0.07182116 0.9964364 0.05440741 0.008705673 0.0007598689
## 466  0.12 0.009548001 0.07236735 0.9963823 0.05480469 0.008779029 0.0007667374
## 467  0.12 0.010488872 0.07296251 0.9963230 0.05523576 0.008863961 0.0007751113
## 468  0.12 0.011522457 0.07361923 0.9962571 0.05571979 0.008936951 0.0007832647
## 469  0.12 0.012657894 0.07425756 0.9961931 0.05618583 0.008989759 0.0007895270
## 470  0.12 0.013905217 0.07486538 0.9961327 0.05660397 0.009047524 0.0007953646
## 471  0.12 0.015275453 0.07550703 0.9960688 0.05702850 0.009117910 0.0008017851
## 472  0.12 0.016780713 0.07620566 0.9959986 0.05749082 0.009183600 0.0008075854
## 473  0.12 0.018434304 0.07693841 0.9959243 0.05798177 0.009244903 0.0008135155
## 474  0.12 0.020250841 0.07771095 0.9958456 0.05850460 0.009316912 0.0008209033
## 475  0.12 0.022246382 0.07841716 0.9957742 0.05893778 0.009399911 0.0008285948
## 476  0.12 0.024438566 0.07907716 0.9957080 0.05927377 0.009513211 0.0008367500
## 477  0.12 0.026846770 0.07982885 0.9956326 0.05963365 0.009652903 0.0008469294
## 478  0.12 0.029492282 0.08066933 0.9955485 0.06002896 0.009782545 0.0008552501
## 479  0.12 0.032398485 0.08158127 0.9954569 0.06045730 0.009940490 0.0008661295
## 480  0.12 0.035591069 0.08251622 0.9953630 0.06088260 0.010118892 0.0008789684
## 481  0.12 0.039098253 0.08350587 0.9952641 0.06132070 0.010305768 0.0008917751
## 482  0.12 0.042951040 0.08458191 0.9951558 0.06182225 0.010536806 0.0009096706
## 483  0.12 0.047183484 0.08576629 0.9950360 0.06239459 0.010789274 0.0009295300
## 484  0.12 0.051832999 0.08707946 0.9949029 0.06306844 0.011051237 0.0009503857
## 485  0.12 0.056940682 0.08851951 0.9947564 0.06385951 0.011344541 0.0009749588
## 486  0.12 0.062551681 0.09013005 0.9945909 0.06477504 0.011684382 0.0010070711
## 487  0.12 0.068715596 0.09195833 0.9944006 0.06580143 0.012064799 0.0010448748
## 488  0.12 0.075486909 0.09390122 0.9941976 0.06686789 0.012470614 0.0010873660
## 489  0.12 0.082925475 0.09598942 0.9939771 0.06808842 0.012881682 0.0011355775
## 490  0.12 0.091097046 0.09819616 0.9937466 0.06947717 0.013280114 0.0011848596
## 491  0.12 0.100073853 0.10053681 0.9935042 0.07098914 0.013736060 0.0012423608
## 492  0.12 0.109935245 0.10303917 0.9932460 0.07266875 0.014212332 0.0013071279
## 493  0.12 0.120768389 0.10575410 0.9929693 0.07452010 0.014568632 0.0013585570
## 494  0.12 0.132669044 0.10842215 0.9927093 0.07631379 0.014922301 0.0014122548
## 495  0.12 0.145742403 0.11116631 0.9924577 0.07814492 0.015215743 0.0014541658
## 496  0.12 0.160104026 0.11396442 0.9922158 0.08003361 0.015547309 0.0015019328
## 497  0.12 0.175880860 0.11703626 0.9919590 0.08212413 0.015878738 0.0015496538
## 498  0.12 0.193212361 0.12043172 0.9916820 0.08448505 0.016222032 0.0016009619
## 499  0.12 0.212251727 0.12415858 0.9913860 0.08712476 0.016547582 0.0016536854
## 500  0.12 0.233167255 0.12823752 0.9910719 0.09009004 0.016877171 0.0017108909
##           MAESD
## 1   0.005409631
## 2   0.005409631
## 3   0.005409631
## 4   0.005409631
## 5   0.005409631
## 6   0.005409631
## 7   0.005377973
## 8   0.005328718
## 9   0.005270407
## 10  0.005207975
## 11  0.005150800
## 12  0.005100210
## 13  0.005067736
## 14  0.005055014
## 15  0.005099091
## 16  0.005157846
## 17  0.005159865
## 18  0.005183345
## 19  0.005241352
## 20  0.005258061
## 21  0.005274202
## 22  0.005305804
## 23  0.005345681
## 24  0.005372690
## 25  0.005432041
## 26  0.005482529
## 27  0.005515124
## 28  0.005534394
## 29  0.005566259
## 30  0.005596479
## 31  0.005636104
## 32  0.005690036
## 33  0.005733849
## 34  0.005806730
## 35  0.005901141
## 36  0.006013286
## 37  0.006170839
## 38  0.006349378
## 39  0.006551668
## 40  0.006755814
## 41  0.007009178
## 42  0.007276577
## 43  0.007545443
## 44  0.007800346
## 45  0.008111317
## 46  0.008438955
## 47  0.008786700
## 48  0.009070244
## 49  0.009347104
## 50  0.009663411
## 51  0.005728048
## 52  0.005728048
## 53  0.005728048
## 54  0.005691648
## 55  0.005612202
## 56  0.005536279
## 57  0.005474796
## 58  0.005421382
## 59  0.005351495
## 60  0.005275295
## 61  0.005211385
## 62  0.005183537
## 63  0.005210821
## 64  0.005188488
## 65  0.005164212
## 66  0.005242396
## 67  0.005238807
## 68  0.005253366
## 69  0.005272279
## 70  0.005293367
## 71  0.005345191
## 72  0.005386883
## 73  0.005431045
## 74  0.005476554
## 75  0.005525869
## 76  0.005562582
## 77  0.005582300
## 78  0.005624437
## 79  0.005665522
## 80  0.005702171
## 81  0.005733470
## 82  0.005776889
## 83  0.005847067
## 84  0.005980481
## 85  0.006093056
## 86  0.006220788
## 87  0.006403102
## 88  0.006596458
## 89  0.006790521
## 90  0.007019591
## 91  0.007230097
## 92  0.007483532
## 93  0.007733424
## 94  0.008002690
## 95  0.008318328
## 96  0.008658152
## 97  0.009001421
## 98  0.009359880
## 99  0.009715583
## 100 0.010049162
## 101 0.005256084
## 102 0.005237301
## 103 0.005224226
## 104 0.005207965
## 105 0.005193303
## 106 0.005175508
## 107 0.005161436
## 108 0.005151912
## 109 0.005140339
## 110 0.005120717
## 111 0.005098278
## 112 0.005095715
## 113 0.005221501
## 114 0.005219450
## 115 0.005226891
## 116 0.005251438
## 117 0.005224827
## 118 0.005286174
## 119 0.005312549
## 120 0.005355716
## 121 0.005421309
## 122 0.005466753
## 123 0.005514333
## 124 0.005564646
## 125 0.005577189
## 126 0.005591998
## 127 0.005616914
## 128 0.005654142
## 129 0.005704215
## 130 0.005747342
## 131 0.005811766
## 132 0.005902000
## 133 0.006004581
## 134 0.006113974
## 135 0.006226010
## 136 0.006377194
## 137 0.006554482
## 138 0.006740429
## 139 0.006928611
## 140 0.007112314
## 141 0.007318009
## 142 0.007564610
## 143 0.007862096
## 144 0.008173252
## 145 0.008520740
## 146 0.008873410
## 147 0.009201787
## 148 0.009547598
## 149 0.009877266
## 150 0.010187117
## 151 0.004768744
## 152 0.004752860
## 153 0.004724086
## 154 0.004736736
## 155 0.004738084
## 156 0.004758983
## 157 0.004791315
## 158 0.004852904
## 159 0.004915186
## 160 0.005010085
## 161 0.005113906
## 162 0.005232442
## 163 0.005332989
## 164 0.005191883
## 165 0.005306714
## 166 0.005288681
## 167 0.005318984
## 168 0.005362984
## 169 0.005435616
## 170 0.005484072
## 171 0.005512105
## 172 0.005534708
## 173 0.005574831
## 174 0.005579531
## 175 0.005597705
## 176 0.005636489
## 177 0.005686530
## 178 0.005730030
## 179 0.005787429
## 180 0.005843669
## 181 0.005889944
## 182 0.005969037
## 183 0.006046020
## 184 0.006164007
## 185 0.006309063
## 186 0.006495039
## 187 0.006674576
## 188 0.006821578
## 189 0.006984418
## 190 0.007205027
## 191 0.007452028
## 192 0.007717674
## 193 0.008013907
## 194 0.008332937
## 195 0.008675833
## 196 0.008995291
## 197 0.009343036
## 198 0.009676011
## 199 0.010002863
## 200 0.010294733
## 201 0.004993659
## 202 0.004979564
## 203 0.004967672
## 204 0.004956835
## 205 0.004946030
## 206 0.004950084
## 207 0.004975647
## 208 0.005019096
## 209 0.005181904
## 210 0.005262714
## 211 0.005271319
## 212 0.005258264
## 213 0.005262266
## 214 0.005298438
## 215 0.005333070
## 216 0.005366650
## 217 0.005392603
## 218 0.005444256
## 219 0.005462240
## 220 0.005482607
## 221 0.005514335
## 222 0.005523116
## 223 0.005545280
## 224 0.005576641
## 225 0.005624279
## 226 0.005671527
## 227 0.005720426
## 228 0.005786737
## 229 0.005813827
## 230 0.005851283
## 231 0.005895373
## 232 0.005975265
## 233 0.006085769
## 234 0.006258012
## 235 0.006412271
## 236 0.006546439
## 237 0.006709526
## 238 0.006884747
## 239 0.007091769
## 240 0.007304237
## 241 0.007560362
## 242 0.007834159
## 243 0.008153971
## 244 0.008476198
## 245 0.008798720
## 246 0.009159343
## 247 0.009500420
## 248 0.009828176
## 249 0.010129839
## 250 0.010388763
## 251 0.005566493
## 252 0.005516237
## 253 0.005455604
## 254 0.005403713
## 255 0.005358996
## 256 0.005291733
## 257 0.005196819
## 258 0.005129845
## 259 0.005206383
## 260 0.005269595
## 261 0.005181385
## 262 0.005286398
## 263 0.005324254
## 264 0.005330514
## 265 0.005391185
## 266 0.005420629
## 267 0.005461771
## 268 0.005498260
## 269 0.005512012
## 270 0.005528046
## 271 0.005558002
## 272 0.005583158
## 273 0.005604391
## 274 0.005649733
## 275 0.005691323
## 276 0.005742608
## 277 0.005800717
## 278 0.005818073
## 279 0.005844526
## 280 0.005890530
## 281 0.005965151
## 282 0.006063727
## 283 0.006188045
## 284 0.006315255
## 285 0.006439975
## 286 0.006594438
## 287 0.006770792
## 288 0.006972550
## 289 0.007165526
## 290 0.007401561
## 291 0.007668121
## 292 0.007950461
## 293 0.008287185
## 294 0.008626545
## 295 0.008950812
## 296 0.009302215
## 297 0.009646663
## 298 0.009947044
## 299 0.010154261
## 300 0.010352688
## 301 0.005094300
## 302 0.005084418
## 303 0.005028189
## 304 0.004990740
## 305 0.005005083
## 306 0.005040762
## 307 0.005094742
## 308 0.005171788
## 309 0.005235900
## 310 0.005268244
## 311 0.005302771
## 312 0.005303673
## 313 0.005319682
## 314 0.005354621
## 315 0.005391774
## 316 0.005441549
## 317 0.005476293
## 318 0.005498287
## 319 0.005534105
## 320 0.005566535
## 321 0.005596767
## 322 0.005625026
## 323 0.005656057
## 324 0.005688997
## 325 0.005722473
## 326 0.005747940
## 327 0.005778586
## 328 0.005807661
## 329 0.005857593
## 330 0.005936078
## 331 0.006029165
## 332 0.006115599
## 333 0.006225645
## 334 0.006348454
## 335 0.006489842
## 336 0.006654313
## 337 0.006838090
## 338 0.007034817
## 339 0.007274745
## 340 0.007523649
## 341 0.007803761
## 342 0.008119825
## 343 0.008463581
## 344 0.008799167
## 345 0.009153127
## 346 0.009497719
## 347 0.009763061
## 348 0.009968577
## 349 0.010196629
## 350 0.010433435
## 351 0.004888249
## 352 0.004886608
## 353 0.004903728
## 354 0.004916386
## 355 0.004940518
## 356 0.004996826
## 357 0.005093387
## 358 0.005212215
## 359 0.005301009
## 360 0.005336129
## 361 0.005350485
## 362 0.005390858
## 363 0.005418525
## 364 0.005450408
## 365 0.005485345
## 366 0.005511742
## 367 0.005529202
## 368 0.005578611
## 369 0.005607542
## 370 0.005631359
## 371 0.005666022
## 372 0.005691898
## 373 0.005711246
## 374 0.005741461
## 375 0.005767129
## 376 0.005779014
## 377 0.005806189
## 378 0.005869231
## 379 0.005924512
## 380 0.006008126
## 381 0.006077371
## 382 0.006146633
## 383 0.006265439
## 384 0.006396397
## 385 0.006553766
## 386 0.006726931
## 387 0.006918113
## 388 0.007152820
## 389 0.007403453
## 390 0.007678086
## 391 0.007971498
## 392 0.008321146
## 393 0.008665756
## 394 0.008991843
## 395 0.009274031
## 396 0.009544625
## 397 0.009777553
## 398 0.010008701
## 399 0.010243928
## 400 0.010467814
## 401 0.005646656
## 402 0.005553625
## 403 0.005437730
## 404 0.005360514
## 405 0.005307996
## 406 0.005235328
## 407 0.005210034
## 408 0.005231219
## 409 0.005275222
## 410 0.005327076
## 411 0.005360524
## 412 0.005382878
## 413 0.005422309
## 414 0.005454768
## 415 0.005471555
## 416 0.005489502
## 417 0.005525277
## 418 0.005570949
## 419 0.005606238
## 420 0.005632598
## 421 0.005670504
## 422 0.005685715
## 423 0.005721930
## 424 0.005739544
## 425 0.005732268
## 426 0.005761066
## 427 0.005832549
## 428 0.005909481
## 429 0.005975883
## 430 0.006040471
## 431 0.006100840
## 432 0.006188958
## 433 0.006314353
## 434 0.006449528
## 435 0.006602115
## 436 0.006796917
## 437 0.007011935
## 438 0.007263008
## 439 0.007545796
## 440 0.007844062
## 441 0.008179116
## 442 0.008506911
## 443 0.008837320
## 444 0.009073942
## 445 0.009325063
## 446 0.009566158
## 447 0.009811639
## 448 0.010058993
## 449 0.010292833
## 450 0.010532931
## 451 0.005339752
## 452 0.005296911
## 453 0.005255910
## 454 0.005249416
## 455 0.005232885
## 456 0.005229190
## 457 0.005294380
## 458 0.005341979
## 459 0.005351427
## 460 0.005358708
## 461 0.005395147
## 462 0.005431369
## 463 0.005454653
## 464 0.005471385
## 465 0.005500509
## 466 0.005535455
## 467 0.005585091
## 468 0.005625554
## 469 0.005653086
## 470 0.005680168
## 471 0.005706056
## 472 0.005730967
## 473 0.005744854
## 474 0.005748746
## 475 0.005762252
## 476 0.005803307
## 477 0.005859475
## 478 0.005916020
## 479 0.005976505
## 480 0.006046429
## 481 0.006124487
## 482 0.006238495
## 483 0.006362767
## 484 0.006510739
## 485 0.006679567
## 486 0.006874877
## 487 0.007120624
## 488 0.007402774
## 489 0.007708539
## 490 0.008008162
## 491 0.008343102
## 492 0.008679472
## 493 0.008883685
## 494 0.009126227
## 495 0.009346557
## 496 0.009599318
## 497 0.009867230
## 498 0.010126813
## 499 0.010352763
## 500 0.010607286

Neural network

# Neural network (nnet): fit with caret's default tuning grid, then re-tune
# over a custom size/decay grid.
# NOTE(review): `dfii`, `my_metric`, and `my_ctrl` are defined earlier in the
# file (not visible here) — presumably the modeling data frame, the metric
# string (e.g. "RMSE"), and a caret::trainControl() resampling spec; confirm.
set.seed(1234)  # reproducible resampling folds and random weight init

# Inputs are centered and scaled (important for neural networks);
# trace = FALSE silences nnet's per-iteration optimizer output.
nnet_default <- caret::train( y ~ .,
                              data = dfii,
                              method = 'nnet',
                              metric = my_metric,
                              preProcess = c('center', 'scale'),
                              trControl = my_ctrl,
                              trace = FALSE)
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo,
## : There were missing values in resampled performance measures.
nnet_default$bestTune
##   size decay
## 9    5   0.1
# Custom grid: 5 hidden-unit counts crossed with 11 decay values spaced
# evenly on the log scale from exp(-6) to exp(0) = 1 (55 combinations).
nnet_grid <- expand.grid( size = c(5,9,13,17,21), 
                          decay = exp(seq(-6, 0, length.out = 11)))

set.seed(1234)  # same seed so the tuned fit uses matching resampling folds

nnet_tune <- caret::train( y ~ .,
                            data = dfii,
                            method = 'nnet',
                            metric = my_metric,
                            preProcess = c('center', 'scale'),
                            trControl = my_ctrl,
                            tuneGrid = nnet_grid,
                            trace = FALSE)
nnet_tune$bestTune
##    size       decay
## 47   21 0.008229747
# Tuning profile; log-transform the x axis since decay values are log-spaced.
plot(nnet_tune, xTrans=log)

Random forest

# Random forest with caret's default mtry search (3 candidate values).
registerDoParallel(cores=8)  # parallel foreach backend used by caret::train
set.seed(1234)  # reproducible resampling folds

# importance = TRUE stores variable-importance scores in the final model.
rf_default <- caret::train( y ~ .,
                            data = dfii,
                            method = "rf",
                            trControl = my_ctrl,
                            metric = my_metric,
                            importance = TRUE)

rf_default$bestTune
##   mtry
## 2    9
rf_default$results
##   mtry       RMSE  Rsquared        MAE      RMSESD   RsquaredSD       MAESD
## 1    2 0.22429331 0.9767999 0.16492021 0.025547536 0.0044085567 0.017438581
## 2    9 0.06968765 0.9967131 0.05124449 0.007394860 0.0005828149 0.004599969
## 3   16 0.08476694 0.9949657 0.06163265 0.007351553 0.0008153420 0.005599433
# Tune the random forest over a finer grid of mtry values (2..20).
registerDoParallel(cores=8)  # parallel foreach backend used by caret::train
set.seed(1234)  # same seed so folds match the default fit above

# Candidate values for mtry (number of predictors sampled at each split).
# Fix: use the modern caret tuning-parameter name `mtry`; the leading-dot
# form `.mtry` is legacy pre-caret-5 syntax that only works through a
# backward-compatibility shim. Redundant parentheses around 2:20 dropped.
rf_grid <- expand.grid(mtry = 2:20)
rf_tune <- caret::train( y ~ .,
                            data = dfii,
                            method = "rf",
                            trControl = my_ctrl,
                            tuneGrid = rf_grid,
                            metric = my_metric,
                            importance = TRUE)

rf_tune$bestTune
##   mtry
## 7    8
# RMSE profile across mtry; log-scaled x axis for readability.
plot(rf_tune, xTrans=log)

Gradient boosted tree

# Gradient boosted trees (xgboost): default caret grid, then a refined grid
# anchored on the default best tune.
registerDoParallel(cores=8)  # parallel foreach backend used by caret::train
set.seed(1234)  # reproducible resampling folds

# nthread = 1 keeps xgboost single-threaded so parallelism comes only from
# the foreach backend (avoids oversubscribing cores); verbosity = 0 silences
# xgboost messages.
xgb_default <- caret::train(y ~ .,
                            data = dfii,
                            method = "xgbTree",
                            trControl = my_ctrl,
                            metric = my_metric,
                            verbosity = 0,
                            nthread = 1  )
xgb_default$bestTune
##    nrounds max_depth eta gamma colsample_bytree min_child_weight subsample
## 33     150         2 0.3     0              0.8                1      0.75
plot(xgb_default)

# Refined grid: more boosting rounds, deeper trees, and smaller learning
# rates (fractions of the default-best eta); the remaining parameters are
# held fixed at the default-best values.
xgb_grid <- expand.grid(nrounds = seq(100, 2500, by = 300),
                        max_depth = c(3, 6, 9, 12),
                        eta = c(0.125, 0.25, 0.5) * xgb_default$bestTune$eta,
                        gamma = xgb_default$bestTune$gamma,
                        colsample_bytree = xgb_default$bestTune$colsample_bytree,
                        min_child_weight = xgb_default$bestTune$min_child_weight,
                        subsample = xgb_default$bestTune$subsample)
registerDoParallel(cores=8)
set.seed(1234)  # same seed so folds match the default fit

xgb_tune <- caret::train(   y ~ .,
                            data = dfii,
                            method = "xgbTree",
                            trControl = my_ctrl,
                            metric = my_metric,
                            tuneGrid = xgb_grid,
                            verbosity = 0,
                            nthread = 1 )

xgb_tune$bestTune
##   nrounds max_depth    eta gamma colsample_bytree min_child_weight subsample
## 8    2200         3 0.0375     0              0.8                1      0.75
xgb_tune %>% plot()

#SVM

# Support vector machine with radial (RBF) kernel: default caret grid
# (3 cost values, sigma estimated), then a wider sigma x C grid.
registerDoParallel(cores=8)  # parallel foreach backend used by caret::train
set.seed(1234)  # reproducible resampling folds

# Center/scale preprocessing matters for distance-based kernels.
svm <- caret::train( y ~ .,
                     data = dfii,
                     method = "svmRadial",
                     preProcess = c('center', 'scale'),
                     trControl = my_ctrl,
                     metric = my_metric)
svm$results
##        sigma    C      RMSE  Rsquared        MAE      RMSESD  RsquaredSD
## 1 0.04208154 0.25 0.1367684 0.9876522 0.10242319 0.014024510 0.002181577
## 2 0.04208154 0.50 0.1169683 0.9907386 0.08824700 0.010838741 0.001689743
## 3 0.04208154 1.00 0.1014462 0.9930260 0.07756597 0.009445028 0.001392402
##         MAESD
## 1 0.009292271
## 2 0.007834135
## 3 0.007083717
registerDoParallel(cores=8)
set.seed(1234)  # same seed so folds match the default fit

# Kernel width (sigma) and cost (C) candidates, both spanning several
# orders of magnitude; 4 x 6 = 24 combinations.
sigma_values <- c(0.01, 0.03, 0.1, 1)
C_values <- c(0.25, 0.5, 1, 10, 100, 1000)
# Create the tuning grid
svm_grid <- expand.grid(sigma = sigma_values, C = C_values)

svm_tune <- caret::train( y ~ .,
                     data = dfii,
                     method = "svmRadial",
                     preProcess = c('center', 'scale'),
                     trControl = my_ctrl,
                     tuneGrid = svm_grid,
                     metric = my_metric)

svm_tune$bestTune
##   sigma   C
## 5  0.01 100
svm_tune$results
##    sigma       C       RMSE  Rsquared        MAE      RMSESD   RsquaredSD
## 1   0.01    0.25 0.12777795 0.9898228 0.09626653 0.015071645 0.0019356865
## 2   0.01    0.50 0.10896819 0.9920011 0.08193742 0.012590203 0.0015834416
## 3   0.01    1.00 0.09720543 0.9934760 0.07370577 0.010194209 0.0012359300
## 4   0.01   10.00 0.08017618 0.9957290 0.06513445 0.006808714 0.0008036555
## 5   0.01  100.00 0.07875694 0.9960889 0.06454914 0.006396148 0.0007903465
## 6   0.01 1000.00 0.07907856 0.9960585 0.06465774 0.006974880 0.0008243659
## 7   0.03    0.25 0.12936595 0.9889298 0.09713662 0.013829632 0.0020025915
## 8   0.03    0.50 0.11306541 0.9913085 0.08532935 0.010809715 0.0015570479
## 9   0.03    1.00 0.09883779 0.9933521 0.07585835 0.009012259 0.0012182063
## 10  0.03   10.00 0.08390817 0.9953818 0.06761251 0.008727717 0.0011020844
## 11  0.03  100.00 0.08335499 0.9954480 0.06720632 0.008360376 0.0010235638
## 12  0.03 1000.00 0.08361050 0.9954225 0.06729382 0.008013786 0.0009804075
## 13  0.10    0.25 0.17644351 0.9804149 0.12992477 0.018769715 0.0036338246
## 14  0.10    0.50 0.14522795 0.9859694 0.10905217 0.014388853 0.0029293035
## 15  0.10    1.00 0.12562099 0.9891770 0.09620281 0.013705532 0.0027085872
## 16  0.10   10.00 0.11202739 0.9912893 0.08594627 0.012756166 0.0022732438
## 17  0.10  100.00 0.11203188 0.9912965 0.08598945 0.012788991 0.0022808364
## 18  0.10 1000.00 0.11203188 0.9912965 0.08598945 0.012788991 0.0022808364
## 19  1.00    0.25 0.53583855 0.8806530 0.37533793 0.052490547 0.0268413572
## 20  1.00    0.50 0.37880698 0.9281586 0.25308602 0.046126011 0.0192836439
## 21  1.00    1.00 0.29744382 0.9480140 0.19900842 0.039281494 0.0158420011
## 22  1.00   10.00 0.28221300 0.9511905 0.18947958 0.037186028 0.0152540469
## 23  1.00  100.00 0.28221300 0.9511905 0.18947958 0.037186028 0.0152540469
## 24  1.00 1000.00 0.28221300 0.9511905 0.18947958 0.037186028 0.0152540469
##          MAESD
## 1  0.010352226
## 2  0.009009789
## 3  0.007709334
## 4  0.005415330
## 5  0.005697106
## 6  0.006251034
## 7  0.009341009
## 8  0.008125951
## 9  0.007241491
## 10 0.006776076
## 11 0.006400757
## 12 0.006124480
## 13 0.011388911
## 14 0.008261096
## 15 0.007986662
## 16 0.007575849
## 17 0.007497325
## 18 0.007497325
## 19 0.032414152
## 20 0.025946183
## 21 0.020642183
## 22 0.020141208
## 23 0.020141208
## 24 0.020141208

#PLS

# Partial least squares: default caret grid (ncomp = 1..3), then an
# expanded search over 1..10 components.
registerDoParallel(cores=8)  # parallel foreach backend used by caret::train
set.seed(1234)  # reproducible resampling folds

# PLS components are built from scaled predictors, hence center/scale.
pls <- caret::train( y ~ .,
                        data = dfii,
                        method = "pls",
                        preProcess = c('center', 'scale'),
                        trControl = my_ctrl,
                        metric = my_metric)

pls$results
##   ncomp      RMSE  Rsquared       MAE     RMSESD  RsquaredSD      MAESD
## 1     1 0.2594890 0.9528249 0.1985486 0.02058900 0.007201300 0.01589521
## 2     2 0.1711272 0.9795025 0.1313449 0.01623436 0.003620325 0.01158483
## 3     3 0.1363831 0.9869397 0.1032318 0.01461153 0.002738435 0.01019301
registerDoParallel(cores=8)
set.seed(1234)  # same seed so folds match the default fit
# Search the number of latent components from 1 to 10.
ncomp_values <- 1:10
pls_grid <- expand.grid(ncomp = ncomp_values)

pls_tune <- caret::train( y ~ .,
                        data = dfii,
                        method = "pls",
                        preProcess = c('center', 'scale'),
                        trControl = my_ctrl,
                        tuneGrid = pls_grid,
                        metric = my_metric)

pls_tune$bestTune
##   ncomp
## 9     9
pls_tune$results
##    ncomp       RMSE  Rsquared        MAE      RMSESD  RsquaredSD       MAESD
## 1      1 0.25948902 0.9528249 0.19854860 0.020589001 0.007201300 0.015895207
## 2      2 0.17112722 0.9795025 0.13134486 0.016234361 0.003620325 0.011584825
## 3      3 0.13638313 0.9869397 0.10323175 0.014611535 0.002738435 0.010193011
## 4      4 0.10206932 0.9927550 0.07769067 0.009472252 0.001344503 0.006968136
## 5      5 0.09567001 0.9936397 0.07096297 0.009358227 0.001257042 0.006499307
## 6      6 0.09316188 0.9939648 0.06942095 0.009363918 0.001195773 0.006103685
## 7      7 0.09229139 0.9940811 0.06838183 0.009404519 0.001201220 0.005987631
## 8      8 0.09100810 0.9942558 0.06736882 0.009045515 0.001147196 0.005989858
## 9      9 0.08916487 0.9944741 0.06714202 0.009046010 0.001097304 0.005961907
## 10    10 0.08918646 0.9944702 0.06718595 0.009105551 0.001106034 0.006023084
# Collect the cross-validation resampling results from all fitted models so
# their performance distributions can be compared on identical folds.
# NOTE(review): the lm_* and enet_* models are trained earlier in the file
# (not visible here); all models must share the same trainControl resampling
# indices for resamples() to be a fair comparison — presumably guaranteed by
# the repeated set.seed(1234) calls before each fit.
caret_acc_compare <- resamples(list(lm_01 = train_lm_01,
                                    lm_02 = train_lm_02,
                                    lm_09 = train_lm_09,
                                    lm_08 = train_lm_08,
                                    enet_01 = enet_tune_01,
                                    enet_09 = enet_tune_09,
                                    enet_08 = enet_tune_08,
                                    nnet_tune = nnet_tune,
                                    rf_default = rf_default,
                                    rf_tune = rf_tune,
                                    xgb_default = xgb_default,
                                    xgb_tune = xgb_tune,
                                    svm_default = svm,
                                    pls_default = pls,
                                    pls_tune = pls_tune))
# Dot plot of per-model RMSE (with confidence intervals) across resamples.
dotplot(caret_acc_compare, metric = 'RMSE')

Based on this comparison, the model “lm_08” has the lowest RMSE. The model with the lowest RMSE is generally considered the best, as it indicates the closest fit to the observed data, so here model “lm_08” is the